[ 481.090658] env[62383]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62383) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 481.091055] env[62383]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62383) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 481.091055] env[62383]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62383) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 481.091405] env[62383]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 481.186154] env[62383]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62383) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 481.197899] env[62383]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.012s {{(pid=62383) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 481.799613] env[62383]: INFO nova.virt.driver [None req-58d4bdab-466a-40a8-aa38-69f32a95dc06 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 481.869607] env[62383]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 481.869805] env[62383]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 481.869871] env[62383]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62383) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 485.060213] env[62383]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-3783f023-a0d3-4246-a8b4-29098c954db1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.076197] env[62383]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62383) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 485.076328] env[62383]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-e758833f-263c-45aa-902b-39e0f133b733 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.107538] env[62383]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 0a774.
[ 485.107663] env[62383]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.238s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 485.108186] env[62383]: INFO nova.virt.vmwareapi.driver [None req-58d4bdab-466a-40a8-aa38-69f32a95dc06 None None] VMware vCenter version: 7.0.3
[ 485.111736] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a62c0c-1e02-40b2-be8b-82fe89766164 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.129955] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a39007-c47e-46ca-8e55-570e5c723cff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.135989] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d5f58a-647a-45f6-abfa-714fa1991038 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.142664] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe92f57-2084-441e-b361-61b28bd31202 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.155776] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04688623-b7a9-403c-b419-962507535827 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.161724] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5abe5a-d1e2-46aa-a37a-8fe610ee8c9d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.191533] env[62383]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-ba34c0f9-56bf-4a8d-a9cf-c28ab5276cf4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 485.196514] env[62383]: DEBUG nova.virt.vmwareapi.driver [None req-58d4bdab-466a-40a8-aa38-69f32a95dc06 None None] Extension org.openstack.compute already exists. {{(pid=62383) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 485.199328] env[62383]: INFO nova.compute.provider_config [None req-58d4bdab-466a-40a8-aa38-69f32a95dc06 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 485.703128] env[62383]: DEBUG nova.context [None req-58d4bdab-466a-40a8-aa38-69f32a95dc06 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),6418ac63-c037-42da-a367-8f89f24009c1(cell1) {{(pid=62383) load_cells /opt/stack/nova/nova/context.py:464}}
[ 485.706184] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 485.706501] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 485.707228] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 485.707670] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Acquiring lock "6418ac63-c037-42da-a367-8f89f24009c1" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 485.707862] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Lock "6418ac63-c037-42da-a367-8f89f24009c1" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 485.708922] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Lock "6418ac63-c037-42da-a367-8f89f24009c1" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 485.728935] env[62383]: INFO dbcounter [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Registered counter for database nova_cell0
[ 485.736933] env[62383]: INFO dbcounter [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Registered counter for database nova_cell1
[ 485.740039] env[62383]: DEBUG oslo_db.sqlalchemy.engines [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62383) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 485.740401] env[62383]: DEBUG oslo_db.sqlalchemy.engines [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62383) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}}
[ 485.745137] env[62383]: ERROR nova.db.main.api [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 485.745137] env[62383]: result = function(*args, **kwargs)
[ 485.745137] env[62383]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 485.745137] env[62383]: return func(*args, **kwargs)
[ 485.745137] env[62383]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 485.745137] env[62383]: result = fn(*args, **kwargs)
[ 485.745137] env[62383]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 485.745137] env[62383]: return f(*args, **kwargs)
[ 485.745137] env[62383]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 485.745137] env[62383]: return db.service_get_minimum_version(context, binaries)
[ 485.745137] env[62383]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 485.745137] env[62383]: _check_db_access()
[ 485.745137] env[62383]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 485.745137] env[62383]: stacktrace = ''.join(traceback.format_stack())
[ 485.745137] env[62383]:
[ 485.746117] env[62383]: ERROR nova.db.main.api [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 485.746117] env[62383]: result = function(*args, **kwargs)
[ 485.746117] env[62383]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 485.746117] env[62383]: return func(*args, **kwargs)
[ 485.746117] env[62383]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 485.746117] env[62383]: result = fn(*args, **kwargs)
[ 485.746117] env[62383]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 485.746117] env[62383]: return f(*args, **kwargs)
[ 485.746117] env[62383]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 485.746117] env[62383]: return db.service_get_minimum_version(context, binaries)
[ 485.746117] env[62383]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 485.746117] env[62383]: _check_db_access()
[ 485.746117] env[62383]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 485.746117] env[62383]: stacktrace = ''.join(traceback.format_stack())
[ 485.746117] env[62383]:
[ 485.746546] env[62383]: WARNING nova.objects.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 485.746644] env[62383]: WARNING nova.objects.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Failed to get minimum service version for cell 6418ac63-c037-42da-a367-8f89f24009c1
[ 485.747077] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Acquiring lock "singleton_lock" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 485.747239] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Acquired lock "singleton_lock" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
485.747474] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Releasing lock "singleton_lock" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 485.747788] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Full set of CONF: {{(pid=62383) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 485.747930] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ******************************************************************************** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 485.748069] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] Configuration options gathered from: {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 485.748242] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 485.748423] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 485.748554] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ================================================================================ {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 485.748759] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] allow_resize_to_same_host = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.748929] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] arq_binding_timeout = 300 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.749076] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] backdoor_port = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.749223] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] backdoor_socket = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.749405] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] block_device_allocate_retries = 60 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.749569] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] block_device_allocate_retries_interval = 3 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.749739] env[62383]: DEBUG 
oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cert = self.pem {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.749906] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.750091] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute_monitors = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.750262] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] config_dir = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.750434] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] config_drive_format = iso9660 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.750572] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.750728] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] config_source = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.750894] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] console_host = devstack {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.751068] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] control_exchange = nova {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.751230] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cpu_allocation_ratio = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.751389] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] daemon = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.751558] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] debug = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.751713] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] default_access_ip_network_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.751879] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] default_availability_zone = nova {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.752041] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] default_ephemeral_format = 
None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.752205] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] default_green_pool_size = 1000 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.752439] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.752601] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] default_schedule_zone = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.752755] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] disk_allocation_ratio = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.752915] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] enable_new_services = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.753100] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] enabled_apis = ['osapi_compute'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.753265] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] enabled_ssl_apis = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.753424] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] flat_injected = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.753580] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] force_config_drive = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.753734] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] force_raw_images = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.753901] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] graceful_shutdown_timeout = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.754072] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] heal_instance_info_cache_interval = 60 {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.754297] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] host = cpu-1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.754474] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.754635] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.754793] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.755017] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.755188] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] instance_build_timeout = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.755349] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] instance_delete_interval = 300 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.755518] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] instance_format = [instance: %(uuid)s] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.755679] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] instance_name_template = instance-%08x {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.755837] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] instance_usage_audit = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.756050] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] instance_usage_audit_period = month {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.756198] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.756376] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.756545] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] internal_service_availability_zone = internal {{(pid=62383) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.756701] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] key = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.756861] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] live_migration_retry_count = 30 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.757037] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] log_color = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.757204] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] log_config_append = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.757376] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.757532] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] log_dir = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.757688] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] log_file = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.757813] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] log_options = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.757971] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] log_rotate_interval = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.758153] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] log_rotate_interval_type = days {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.758342] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] log_rotation_type = none {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.758482] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.758608] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.758780] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.758939] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.759076] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.759265] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] long_rpc_timeout = 1800 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.759439] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] max_concurrent_builds = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.759601] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] max_concurrent_live_migrations = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.759760] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] max_concurrent_snapshots = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.759917] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] max_local_block_devices = 3 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.760087] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] max_logfile_count = 30 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.760253] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] max_logfile_size_mb = 200 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.760416] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] maximum_instance_delete_attempts = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.760584] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] metadata_listen = 0.0.0.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.760750] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] metadata_listen_port = 8775 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.760920] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] metadata_workers = 2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.761094] env[62383]: DEBUG oslo_service.service 
[None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] migrate_max_retries = -1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.761263] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] mkisofs_cmd = genisoimage {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.761469] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.761602] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] my_ip = 10.180.1.21 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.761802] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.761965] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] network_allocate_retries = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.762154] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.762323] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.762488] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] osapi_compute_listen_port = 8774 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.762655] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] osapi_compute_unique_server_name_scope = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.762823] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] osapi_compute_workers = 2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.762983] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] password_length = 12 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.763153] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] periodic_enable = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.763311] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] periodic_fuzzy_delay = 60 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.763476] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] pointer_model = usbtablet 
{{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.763640] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] preallocate_images = none {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.763797] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] publish_errors = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.763926] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] pybasedir = /opt/stack/nova {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.764090] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ram_allocation_ratio = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.764253] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] rate_limit_burst = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.764423] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] rate_limit_except_level = CRITICAL {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.764580] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] rate_limit_interval = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.764738] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] reboot_timeout = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.764895] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] reclaim_instance_interval = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.765057] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] record = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.765229] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] reimage_timeout_per_gb = 60 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.765395] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] report_interval = 120 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.765556] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] rescue_timeout = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.765717] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] reserved_host_cpus = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.765875] env[62383]: DEBUG oslo_service.service [None 
req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] reserved_host_disk_mb = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.766043] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] reserved_host_memory_mb = 512 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.766236] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] reserved_huge_pages = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.766405] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] resize_confirm_window = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.766562] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] resize_fs_using_block_device = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.766718] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] resume_guests_state_on_host_boot = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.766888] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.767062] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] rpc_response_timeout = 60 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.767227] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] run_external_periodic_tasks = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.767394] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] running_deleted_instance_action = reap {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.767553] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.767711] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] running_deleted_instance_timeout = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.767867] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler_instance_sync_interval = 120 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.768047] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_down_time = 720 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.768237] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] 
servicegroup_driver = db {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.768411] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] shell_completion = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.768576] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] shelved_offload_time = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.768737] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] shelved_poll_interval = 3600 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.768903] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] shutdown_timeout = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.769078] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] source_is_ipv6 = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.769257] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ssl_only = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.769510] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.769689] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] sync_power_state_interval = 600 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.769851] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] sync_power_state_pool_size = 1000 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.770032] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] syslog_log_facility = LOG_USER {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.770194] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] tempdir = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.770356] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] timeout_nbd = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.770524] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] transport_url = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.770686] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] update_resources_interval = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.770844] env[62383]: DEBUG 
oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] use_cow_images = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.771009] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] use_journal = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.771170] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] use_json = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.771327] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] use_rootwrap_daemon = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.771482] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] use_stderr = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.771637] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] use_syslog = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.771789] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vcpu_pin_set = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.771951] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plugging_is_fatal = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.772132] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plugging_timeout = 300 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.772297] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] virt_mkfs = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.772519] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] volume_usage_poll_interval = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.772698] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] watch_log_file = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.772868] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] web = /usr/share/spice-html5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 485.773079] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.773232] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.773396] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.773564] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_concurrency.disable_process_locking = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.774474] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.774677] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.774850] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.775037] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.775219] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.775391] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.775577] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.auth_strategy = keystone {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.775748] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.compute_link_prefix = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.775929] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.776132] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.dhcp_domain = novalocal {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.776323] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.enable_instance_password = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.776494] 
env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.glance_link_prefix = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.776664] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.776838] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.777017] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.instance_list_per_project_cells = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.777183] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.list_records_by_skipping_down_cells = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.777347] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.local_metadata_per_cell = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.777520] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.max_limit = 1000 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.777687] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.metadata_cache_expiration = 15 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.777862] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.neutron_default_tenant_id = default {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.778048] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.response_validation = warn {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.778240] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.use_neutron_default_nets = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.778430] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.778598] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.778767] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.778943] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.779127] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.vendordata_dynamic_targets = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.779315] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.vendordata_jsonfile_path = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.779507] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.779702] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.backend = dogpile.cache.memcached {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.779873] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.backend_argument = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.780048] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.backend_expiration_time = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.780225] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.config_prefix = cache.oslo {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.780398] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.dead_timeout = 60.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.780560] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.debug_cache_backend = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.780723] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.enable_retry_client = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.780883] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.enable_socket_keepalive = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.781066] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.enabled = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.781234] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.enforce_fips_mode = False {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.781399] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.expiration_time = 600 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.781563] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.hashclient_retry_attempts = 2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.781727] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.781891] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.memcache_dead_retry = 300 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.782064] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.memcache_password = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.782234] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.782398] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.782559] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.memcache_pool_maxsize = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.782720] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.782880] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.memcache_sasl_enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.783066] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.783238] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.783393] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.memcache_username = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.783556] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.proxies = [] {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.783716] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.redis_db = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.783876] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.redis_password = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.784059] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.784239] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.784412] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.redis_server = localhost:6379 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.784579] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.redis_socket_timeout = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.784740] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.redis_username = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.784902] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.retry_attempts = 2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.785076] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.retry_delay = 0.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.785244] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.socket_keepalive_count = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.785408] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.socket_keepalive_idle = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.785570] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.socket_keepalive_interval = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.785729] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.tls_allowed_ciphers = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.785886] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.tls_cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.786056] 
env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.tls_certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.786247] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.tls_enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.786437] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cache.tls_keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.786583] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.auth_section = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.786758] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.auth_type = password {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.786922] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.787116] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.787282] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.787447] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.787615] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.cross_az_attach = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.787778] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.debug = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.787938] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.endpoint_template = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.788115] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.http_retries = 3 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.788307] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.788480] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.keyfile = None {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.788663] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.os_region_name = RegionOne {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.788887] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.789079] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cinder.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.789262] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.789429] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.cpu_dedicated_set = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.789590] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.cpu_shared_set = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.789757] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.image_type_exclude_list = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.789922] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.790100] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.790269] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.790436] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.790610] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.790774] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.resource_provider_association_refresh = 300 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.790938] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.791115] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.shutdown_retry_interval = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.791301] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.791486] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] conductor.workers = 2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.791665] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] console.allowed_origins = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.791828] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] console.ssl_ciphers = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.792007] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] console.ssl_minimum_version = default {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.792188] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] consoleauth.enforce_session_timeout = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.792359] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] consoleauth.token_ttl = 600 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.792535] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.792694] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.792857] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.793029] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.connect_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.793194] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.connect_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.793353] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.endpoint_override = None 
{{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.793516] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.793676] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.793836] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.max_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.793994] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.min_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.794166] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.region_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.794325] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.retriable_status_codes = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.794483] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.service_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.794649] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.service_type = accelerator {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.794810] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.794967] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.status_code_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.795137] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.status_code_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.795300] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.795485] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.795641] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] cyborg.version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
485.795812] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.asyncio_connection = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.795975] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.asyncio_slave_connection = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.796190] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.backend = sqlalchemy {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.796382] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.connection = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.796555] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.connection_debug = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.796729] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.connection_parameters = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.796894] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.connection_recycle_time = 3600 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.797071] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.connection_trace = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.797241] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.db_inc_retry_interval = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.797410] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.db_max_retries = 20 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.797576] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.db_max_retry_interval = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.797740] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.db_retry_interval = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.797903] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.max_overflow = 50 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.798083] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.max_pool_size = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.798269] env[62383]: DEBUG oslo_service.service [None 
req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.max_retries = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.798453] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.798618] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.mysql_wsrep_sync_wait = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.798782] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.pool_timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.798947] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.retry_interval = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.799123] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.slave_connection = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.799311] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.sqlite_synchronous = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.799487] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] database.use_db_reconnect = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.799656] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.asyncio_connection = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.799818] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.asyncio_slave_connection = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.799990] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.backend = sqlalchemy {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.800178] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.connection = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.800346] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.connection_debug = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.800519] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.connection_parameters = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.800685] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None 
None] api_database.connection_recycle_time = 3600 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.800848] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.connection_trace = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.801026] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.db_inc_retry_interval = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.801190] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.db_max_retries = 20 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.801355] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.db_max_retry_interval = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.801523] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.db_retry_interval = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.801686] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.max_overflow = 50 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.801849] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.max_pool_size = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.802021] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.max_retries = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.802197] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.802362] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.802523] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.pool_timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.802685] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.retry_interval = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.802845] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] api_database.slave_connection = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.803015] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] 
api_database.sqlite_synchronous = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.803204] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] devices.enabled_mdev_types = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.803393] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.803560] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.803722] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ephemeral_storage_encryption.enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.803883] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.804064] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.api_servers = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.804231] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.804394] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.804557] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.804715] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.connect_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.804871] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.connect_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.805043] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.debug = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.805214] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.default_trusted_certificate_ids = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.805375] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.enable_certificate_validation 
= False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.805534] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.enable_rbd_download = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.805690] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.endpoint_override = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.805855] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.806024] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.806212] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.max_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.806383] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.min_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.806548] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.num_retries = 3 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.806716] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.rbd_ceph_conf = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.806880] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.rbd_connect_timeout = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.807058] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.rbd_pool = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.807230] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.rbd_user = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.807397] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.region_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.807555] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.retriable_status_codes = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.807712] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.service_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.807877] env[62383]: DEBUG 
oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.service_type = image {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.808052] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.808218] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.status_code_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.808464] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.status_code_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.808667] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.808856] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.809035] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.verify_glance_signatures = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.809207] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] glance.version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.809378] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] guestfs.debug = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.809549] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.auth_section = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.809709] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.auth_type = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.809870] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.810042] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.810212] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.810377] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.connect_retries = None 
{{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.810538] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.connect_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.810697] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.endpoint_override = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.810859] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.811028] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.811192] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.max_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.811353] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.min_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.811513] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.region_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.811672] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.retriable_status_codes = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.811828] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.service_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.811996] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.service_type = shared-file-system {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.812176] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.share_apply_policy_timeout = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.812340] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.812503] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.status_code_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.812656] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.status_code_retry_delay = None {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.812815] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.812994] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.813170] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] manila.version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.813339] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] mks.enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.813685] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.813875] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] image_cache.manager_interval = 2400 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.814058] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] image_cache.precache_concurrency = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.814231] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] image_cache.remove_unused_base_images = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.814405] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.814574] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.814750] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] image_cache.subdirectory_name = _base {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.814926] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.api_max_retries = 60 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.815106] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.api_retry_interval = 2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.815273] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.auth_section = None {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.815434] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.auth_type = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.815594] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.815752] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.815915] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.816091] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.conductor_group = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.816299] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.connect_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.816470] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.connect_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.816631] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.endpoint_override = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.816795] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.816955] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.817129] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.max_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.817290] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.min_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.817456] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.peer_list = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.817616] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.region_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.817774] env[62383]: DEBUG oslo_service.service [None 
req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.retriable_status_codes = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.817939] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.serial_console_state_timeout = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.818110] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.service_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.818328] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.service_type = baremetal {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.818498] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.shard = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.818662] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.818822] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.status_code_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.818979] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.status_code_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.819154] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.819361] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.819531] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ironic.version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.819713] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.819888] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] key_manager.fixed_key = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.820085] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.820251] env[62383]: DEBUG oslo_service.service [None 
req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.barbican_api_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.820414] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.barbican_endpoint = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.820585] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.barbican_endpoint_type = public {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.820742] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.barbican_region_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.820902] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.821072] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.821247] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.821426] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.821588] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.821752] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.number_of_retries = 60 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.821914] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.retry_delay = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.822090] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.send_service_user_token = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.822255] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.822415] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.822574] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.verify_ssl = True {{(pid=62383) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.822731] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican.verify_ssl_path = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.822898] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican_service_user.auth_section = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.823070] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican_service_user.auth_type = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.823236] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican_service_user.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.823397] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican_service_user.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.823584] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican_service_user.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.823716] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican_service_user.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.823875] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican_service_user.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.824047] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican_service_user.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.824212] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] barbican_service_user.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.824382] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.approle_role_id = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.824542] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.approle_secret_id = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.824711] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.kv_mountpoint = secret {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.824872] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.kv_path = None {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.825048] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.kv_version = 2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.825212] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.namespace = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.825375] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.root_token_id = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.825533] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.ssl_ca_crt_file = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.825698] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.timeout = 60.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.825857] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.use_ssl = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.826034] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.826243] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.826408] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.826572] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.826733] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.connect_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.826893] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.connect_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.827063] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.endpoint_override = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.827228] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.827390] env[62383]: DEBUG oslo_service.service [None 
req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.827545] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.max_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.827700] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.min_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.827856] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.region_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.828023] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.retriable_status_codes = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.828190] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.service_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.828390] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.service_type = identity {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.828557] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.828718] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.status_code_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.828877] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.status_code_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.829048] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.829245] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.829422] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] keystone.version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.829614] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.ceph_mount_options = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.829908] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.ceph_mount_point_base 
= /opt/stack/data/n-cpu-1/mnt {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.830100] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.connection_uri = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.830268] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.cpu_mode = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.830439] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.830608] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.cpu_models = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.830778] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.cpu_power_governor_high = performance {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.830946] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.831122] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.cpu_power_management = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.831306] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.831499] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.device_detach_attempts = 8 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.831669] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.device_detach_timeout = 20 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.831838] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.disk_cachemodes = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.831998] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.disk_prefix = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.832176] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.enabled_perf_events = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.832341] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.file_backed_memory = 0 {{(pid=62383) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.832509] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.gid_maps = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.832666] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.hw_disk_discard = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.832823] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.hw_machine_type = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.832992] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.images_rbd_ceph_conf = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.833170] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.833354] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.833539] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.images_rbd_glance_store_name = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.833711] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.images_rbd_pool = rbd {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.833881] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.images_type = default {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.834054] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.images_volume_group = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.834223] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.inject_key = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.834388] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.inject_partition = -2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.834549] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.inject_password = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.834708] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.iscsi_iface = None {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.834869] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.iser_use_multipath = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.835043] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.835210] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.835376] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_downtime = 500 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.835539] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.835703] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.835864] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_inbound_addr = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.836032] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.836222] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.836394] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_scheme = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.836566] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_timeout_action = abort {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.836728] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_tunnelled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.836887] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.live_migration_uri = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.837062] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.837231] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.max_queues = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.837397] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.837630] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.837795] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.nfs_mount_options = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.838098] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.838307] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.838527] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.838650] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.838818] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.838986] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.num_pcie_ports = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.839172] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.839385] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.pmem_namespaces = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.839555] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.quobyte_client_cfg = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.839853] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.840044] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.840218] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.840400] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.840563] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.rbd_secret_uuid = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.840722] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.rbd_user = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.840888] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.841070] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.841234] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.rescue_image_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.841397] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.rescue_kernel_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.841558] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.rescue_ramdisk_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.841729] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.841890] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.rx_queue_size = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.842072] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.smbfs_mount_options = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.842370] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.842547] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.snapshot_compression = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.842712] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.snapshot_image_format = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.842933] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.843112] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.sparse_logical_volumes = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.843279] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.swtpm_enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.843453] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.swtpm_group = tss {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.843621] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.swtpm_user = tss {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.843792] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.sysinfo_serial = unique {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.843953] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.tb_cache_size = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.844124] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.tx_queue_size = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.844291] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.uid_maps = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.844456] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.use_virtio_for_bridges = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.844627] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.virt_type = kvm {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.844796] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.volume_clear = zero 
{{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.844959] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.volume_clear_size = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.845144] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.volume_use_multipath = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.845305] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.vzstorage_cache_path = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.845477] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.845644] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.845807] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.845973] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.846272] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.846455] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.vzstorage_mount_user = stack {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.846631] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.846804] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.auth_section = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.846980] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.auth_type = password {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.847157] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.847321] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.certfile = None 
{{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.847493] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.847656] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.connect_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.847816] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.connect_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.847986] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.default_floating_pool = public {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.848160] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.endpoint_override = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.848359] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.extension_sync_interval = 600 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.848529] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.http_retries = 3 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.848691] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.848849] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.849020] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.max_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.849200] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.849391] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.min_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.849566] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.ovs_bridge = br-int {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.849731] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.physnets = [] {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.849904] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.region_name = RegionOne {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.850079] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.retriable_status_codes = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.850255] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.service_metadata_proxy = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.850420] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.service_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.850589] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.service_type = network {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.850748] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.850907] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.status_code_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.851075] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.status_code_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.851239] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.851420] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.851580] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] neutron.version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.851750] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] notifications.bdms_in_notifications = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.851926] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] notifications.default_level = INFO {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.852100] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] notifications.include_share_mapping = False {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.852279] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] notifications.notification_format = unversioned {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.852446] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] notifications.notify_on_state_change = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.852618] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.852793] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] pci.alias = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.852962] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] pci.device_spec = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.853140] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] pci.report_in_placement = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.853314] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.auth_section = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.853487] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.auth_type = password {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.853656] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.853815] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.853976] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.854151] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.854312] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.connect_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.854475] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.connect_retry_delay = None {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.854635] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.default_domain_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.854792] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.default_domain_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.854949] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.domain_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.855120] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.domain_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.855280] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.endpoint_override = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.855441] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.855596] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.855751] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.max_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.855907] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.min_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.856087] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.password = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.856251] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.project_domain_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.856419] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.project_domain_name = Default {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.856586] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.project_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.856758] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.project_name = service {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.856926] 
env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.region_name = RegionOne {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.857100] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.retriable_status_codes = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.857263] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.service_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.857435] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.service_type = placement {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.857594] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.857764] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.status_code_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.857924] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.status_code_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.858093] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.system_scope = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.858278] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.858448] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.trust_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.858608] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.user_domain_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.858776] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.user_domain_name = Default {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.858934] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.user_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.859121] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.username = nova {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.859350] env[62383]: DEBUG oslo_service.service [None 
req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.859555] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] placement.version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.859743] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.cores = 20 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.859911] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.count_usage_from_placement = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.860098] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.860277] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.injected_file_content_bytes = 10240 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.860448] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.injected_file_path_length = 255 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.860618] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.injected_files = 5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.860786] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.instances = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.860953] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.key_pairs = 100 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.861134] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.metadata_items = 128 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.861305] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.ram = 51200 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.861472] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.recheck_quota = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.861643] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.server_group_members = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.861810] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.server_groups = 10 {{(pid=62383) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.862027] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.862208] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] quota.unified_limits_resource_strategy = require {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.862390] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.862555] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.862716] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler.image_metadata_prefilter = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.862876] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.863052] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler.max_attempts = 3 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.863222] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler.max_placement_results = 1000 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.863389] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.863550] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.863711] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.863885] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] scheduler.workers = 2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.864071] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.864245] 
env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.864426] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.864594] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.864760] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.864923] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.865099] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.865291] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.865465] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.host_subset_size = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.865633] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.865793] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.865956] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.866137] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.isolated_hosts = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.866304] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.isolated_images = [] 
{{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.866484] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.866647] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.866813] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.866972] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.pci_in_placement = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.867148] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.867311] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.867480] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.867635] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.867795] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.867957] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.868132] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.track_instance_changes = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.868334] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.868512] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] metrics.required = True {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.868680] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] metrics.weight_multiplier = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.868846] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.869022] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] metrics.weight_setting = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.869346] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.869534] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] serial_console.enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.869714] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] serial_console.port_range = 10000:20000 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.869887] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.870070] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.870246] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] serial_console.serialproxy_port = 6083 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.870419] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_user.auth_section = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.870593] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_user.auth_type = password {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.870756] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_user.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.870915] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_user.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.871090] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_user.collect_timing = False {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.871255] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_user.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.871415] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_user.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.871587] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_user.send_service_user_token = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.871749] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_user.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.871909] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] service_user.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.872092] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.agent_enabled = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.872261] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.872561] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.872764] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.872935] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.html5proxy_port = 6082 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.873113] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.image_compression = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.873279] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.jpeg_compression = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.873442] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.playback_compression = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.873604] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.require_secure = False {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.873772] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.server_listen = 127.0.0.1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.873940] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.874116] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.streaming_mode = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.874277] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] spice.zlib_compression = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.874449] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] upgrade_levels.baseapi = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.874621] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] upgrade_levels.compute = auto {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.874781] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] upgrade_levels.conductor = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.874940] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] upgrade_levels.scheduler = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.875118] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.875282] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.875444] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vendordata_dynamic_auth.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.875603] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vendordata_dynamic_auth.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.875764] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.875922] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vendordata_dynamic_auth.insecure = False {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.876091] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.876257] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.876417] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vendordata_dynamic_auth.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.876589] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.api_retry_count = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.876749] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.ca_file = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.876921] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.877101] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.cluster_name = testcl1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.877271] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.connection_pool_size = 10 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.877434] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.console_delay_seconds = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.877605] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.datastore_regex = ^datastore.* {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.877809] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.877986] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.host_password = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.878173] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.host_port = 443 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.878376] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.host_username = administrator@vsphere.local {{(pid=62383) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.878553] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.insecure = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.878717] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.integration_bridge = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.878883] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.maximum_objects = 100 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.879055] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.pbm_default_policy = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.879221] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.pbm_enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.879407] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.pbm_wsdl_location = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.879585] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.879745] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.serial_port_proxy_uri = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.879901] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.serial_port_service_uri = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.880076] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.task_poll_interval = 0.5 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.880252] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.use_linked_clone = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.880424] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.vnc_keymap = en-us {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.880612] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.vnc_port = 5900 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.880788] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vmware.vnc_port_total = 10000 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.880976] 
env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vnc.auth_schemes = ['none'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.881171] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vnc.enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.881460] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.881649] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.881821] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vnc.novncproxy_port = 6080 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.882029] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vnc.server_listen = 127.0.0.1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.882220] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.882389] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vnc.vencrypt_ca_certs = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.882550] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vnc.vencrypt_client_cert = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.882709] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vnc.vencrypt_client_key = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.882882] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.883064] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.disable_deep_image_inspection = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.883230] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.883392] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
485.883552] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.883715] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.disable_rootwrap = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.883876] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.enable_numa_live_migration = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.884045] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.884211] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.884372] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.884531] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.libvirt_disable_apic = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.884688] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.884850] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.885024] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.885181] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.885340] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.885499] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.885656] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.885813] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.885970] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.886147] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.886333] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.886506] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] wsgi.client_socket_timeout = 900 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.886673] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] wsgi.default_pool_size = 1000 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.886840] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] wsgi.keep_alive = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.887016] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] wsgi.max_header_line = 16384 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.887191] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.887358] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] wsgi.ssl_ca_file = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.887523] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] wsgi.ssl_cert_file = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.887697] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] wsgi.ssl_key_file = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.887850] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] wsgi.tcp_keepidle = 600 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.888046] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.888244] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] zvm.ca_file = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.888415] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] zvm.cloud_connector_url = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.888711] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.888889] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] zvm.reachable_timeout = 300 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.889078] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.889264] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.889467] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.connection_string = messaging:// {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.889639] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.enabled = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.889809] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.es_doc_type = notification {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.889980] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.es_scroll_size = 10000 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.890165] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.es_scroll_time = 2m {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.890328] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.filter_error_trace = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.890496] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.hmac_keys = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.890689] env[62383]: DEBUG 
oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.sentinel_service_name = mymaster {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.890859] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.socket_timeout = 0.1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.891030] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.trace_requests = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.891196] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler.trace_sqlalchemy = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.891378] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler_jaeger.process_tags = {} {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.891540] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler_jaeger.service_name_prefix = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.891705] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] profiler_otlp.service_name_prefix = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.891871] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] remote_debug.host = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.892041] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] remote_debug.port = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.892224] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.892390] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.892554] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.892717] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.892876] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.893062] 
env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.893234] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.893401] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.893561] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.893731] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.893891] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.894074] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.894242] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.894408] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.894578] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.894740] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.894905] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.895087] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.895257] env[62383]: DEBUG oslo_service.service [None 
req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.895421] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.895587] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.895754] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.895916] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.896092] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.896261] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.896422] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.896584] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.896744] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.896903] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.897076] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.ssl = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.897248] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.897415] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.ssl_cert_file = 
{{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.897575] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.897740] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.897904] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.898073] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.898288] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.898470] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_notifications.retry = -1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.898649] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.898820] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.898990] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.auth_section = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.899171] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.auth_type = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.899352] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.cafile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.899526] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.certfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.899690] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.collect_timing = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.899848] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] 
oslo_limit.connect_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.900019] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.connect_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.900183] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.endpoint_id = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.900359] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.endpoint_interface = publicURL {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.900530] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.endpoint_override = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.900711] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.endpoint_region_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.900874] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.endpoint_service_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.901048] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.endpoint_service_type = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.901217] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.insecure = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.901377] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.keyfile = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.901534] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.max_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.901693] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.min_version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.901851] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.region_name = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.902015] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.retriable_status_codes = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.902181] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.service_name = None {{(pid=62383) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.902344] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.service_type = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.902507] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.split_loggers = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.902665] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.status_code_retries = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.902824] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.status_code_retry_delay = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.902982] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.timeout = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.903158] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.valid_interfaces = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.903317] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_limit.version = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.903486] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_reports.file_event_handler = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.903650] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.903810] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] oslo_reports.log_dir = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.903983] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.904159] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.904337] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.904485] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.904652] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.904812] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.904981] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.905157] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_ovs_privileged.group = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.905317] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.905484] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.905646] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.905806] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] vif_plug_ovs_privileged.user = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.905976] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.906168] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.906344] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.906517] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.906686] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.906858] 
env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.907035] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.907205] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.907396] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.907599] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_ovs.isolate_vif = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.907781] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.907951] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.908140] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.908344] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.908519] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] os_vif_ovs.per_port_bridge = False {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.908693] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] privsep_osbrick.capabilities = [21] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.908856] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] privsep_osbrick.group = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.909025] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] privsep_osbrick.helper_command = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.909201] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
485.909395] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.909570] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] privsep_osbrick.user = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.909747] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.909909] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] nova_sys_admin.group = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.910080] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] nova_sys_admin.helper_command = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.910250] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.910415] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.910594] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] nova_sys_admin.user = None {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 485.910737] env[62383]: DEBUG oslo_service.service [None req-62317746-07d2-4079-ab0a-9bb1b506ebe6 None None] ******************************************************************************** {{(pid=62383) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 485.911151] env[62383]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 486.414835] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Getting list of instances from cluster (obj){ [ 486.414835] env[62383]: value = "domain-c8" [ 486.414835] env[62383]: _type = "ClusterComputeResource" [ 486.414835] env[62383]: } {{(pid=62383) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 486.415930] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e286abf6-e60d-40ac-807f-b7ab7c741de2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 486.424646] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Got total of 0 instances {{(pid=62383) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 486.425270] env[62383]: WARNING nova.virt.vmwareapi.driver [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 486.425715] env[62383]: INFO nova.virt.node [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Generated node identity 60615f54-0557-436e-a486-87505bffb4c7 [ 486.425951] env[62383]: INFO nova.virt.node [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Wrote node identity 60615f54-0557-436e-a486-87505bffb4c7 to /opt/stack/data/n-cpu-1/compute_id [ 486.929157] env[62383]: WARNING nova.compute.manager [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Compute nodes ['60615f54-0557-436e-a486-87505bffb4c7'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 487.934317] env[62383]: INFO nova.compute.manager [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 488.939710] env[62383]: WARNING nova.compute.manager [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 488.940066] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 488.940236] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 488.940389] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 488.940543] env[62383]: DEBUG nova.compute.resource_tracker [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 488.941493] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96dc37f5-c71a-4589-a842-ee0c8ffcc814 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 488.949364] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf810049-3a05-41c6-b5ad-b78480640486 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 488.962819] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689abf79-4ff6-44ef-bea4-366de88127ab {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 488.969104] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22099eb-5eb4-4a5c-b647-ee365aff47f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 488.997596] env[62383]: DEBUG nova.compute.resource_tracker [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181214MB free_disk=146GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 488.997745] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 488.997932] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 489.500626] env[62383]: WARNING nova.compute.resource_tracker [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] No compute node record for cpu-1:60615f54-0557-436e-a486-87505bffb4c7: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 60615f54-0557-436e-a486-87505bffb4c7 could not be found. [ 490.004557] env[62383]: INFO nova.compute.resource_tracker [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 60615f54-0557-436e-a486-87505bffb4c7 [ 491.514332] env[62383]: DEBUG nova.compute.resource_tracker [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 491.514687] env[62383]: DEBUG nova.compute.resource_tracker [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 491.670161] env[62383]: INFO nova.scheduler.client.report [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] [req-bc123735-e3a0-4b2f-9388-5a64c4c28399] Created resource provider record via placement API for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
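The resource-provider record created above only becomes usable once it carries an inventory, which is what the next entries report: VCPU, MEMORY_MB and DISK_GB totals together with reserved amounts and allocation ratios. A minimal sketch of the capacity arithmetic those numbers imply, using the values visible in the surrounding entries and assuming Placement's usual "(total - reserved) * allocation_ratio" rule (illustrative only; this is not Nova's resource-tracker code):

    # Illustrative sketch: how the totals the resource tracker logs above map onto
    # the schedulable capacity implied by the inventory reported just below.
    # The numbers are copied from the surrounding log entries; the allocation
    # ratios (cpu 4.0, ram 1.0, disk 1.0) are the ones shown in that inventory.

    def effective_capacity(total, reserved, allocation_ratio):
        # Capacity Placement will allow allocations against for one resource class.
        return int((total - reserved) * allocation_ratio)

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    if __name__ == '__main__':
        for rc, inv in inventory.items():
            print(rc, effective_capacity(inv['total'], inv['reserved'],
                                         inv['allocation_ratio']))
        # VCPU 192, MEMORY_MB 196078, DISK_GB 400

With the 4.0 CPU allocation ratio, the 48 physical vCPUs reported by the hypervisor are exposed as 192 schedulable VCPU units, which is consistent with the instance claims later in the log succeeding against this provider without exhausting it.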
[ 491.688624] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580ef7b2-08a5-4a71-85cf-5bde415e8e52 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.696668] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73c5ed7-f6c2-498f-9b7f-825f835d4bf4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.727304] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d361dd-faa8-4d41-ace5-dbf19b944098 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.734617] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1baa46a-1c8a-4aa0-97db-05efa06dddf3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.747545] env[62383]: DEBUG nova.compute.provider_tree [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 492.296709] env[62383]: DEBUG nova.scheduler.client.report [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 492.296709] env[62383]: DEBUG nova.compute.provider_tree [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 0 to 1 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 492.296709] env[62383]: DEBUG nova.compute.provider_tree [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 492.352692] env[62383]: DEBUG nova.compute.provider_tree [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Updating 
resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 1 to 2 during operation: update_traits {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 492.857044] env[62383]: DEBUG nova.compute.resource_tracker [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 492.857475] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.859s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 492.857475] env[62383]: DEBUG nova.service [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Creating RPC server for service compute {{(pid=62383) start /opt/stack/nova/nova/service.py:186}} [ 492.870888] env[62383]: DEBUG nova.service [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] Join ServiceGroup membership for this service compute {{(pid=62383) start /opt/stack/nova/nova/service.py:203}} [ 492.871076] env[62383]: DEBUG nova.servicegroup.drivers.db [None req-8d98e3ca-2f02-4687-b9fa-55868ce3ed2f None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62383) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 492.871740] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 493.374555] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Getting list of instances from cluster (obj){ [ 493.374555] env[62383]: value = "domain-c8" [ 493.374555] env[62383]: _type = "ClusterComputeResource" [ 493.374555] env[62383]: } {{(pid=62383) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 493.375753] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aede97ba-7d72-432e-a3aa-b7e7bb0b3018 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 493.384347] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Got total of 0 instances {{(pid=62383) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 493.384596] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 493.384888] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Getting list of instances from cluster (obj){ [ 493.384888] env[62383]: value = "domain-c8" [ 493.384888] env[62383]: _type = "ClusterComputeResource" [ 493.384888] env[62383]: } {{(pid=62383) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 493.385749] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88631e8f-2239-41ee-b7b8-12cb70668c06 
{{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 493.392938] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Got total of 0 instances {{(pid=62383) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 531.799033] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.802202] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.305533] env[62383]: DEBUG nova.compute.manager [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 532.828416] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "8e911bad-5408-4588-9865-912ce4457d34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.828726] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "8e911bad-5408-4588-9865-912ce4457d34" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.862692] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.862918] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.865094] env[62383]: INFO nova.compute.claims [None 
req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 533.335016] env[62383]: DEBUG nova.compute.manager [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 533.604428] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquiring lock "8a2b209c-423c-446c-a769-f7d7820d46da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.604596] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lock "8a2b209c-423c-446c-a769-f7d7820d46da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.823438] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquiring lock "ab338058-13c8-4df9-ba55-fabe1952557d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.823438] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lock "ab338058-13c8-4df9-ba55-fabe1952557d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.863346] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.958902] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fbbf78-e69a-488b-b4b4-1c146e118022 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.966966] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2703c0ac-0270-439b-8b03-047ec7efe861 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.001838] env[62383]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2719bd1-40f7-4c73-8023-35378d7334ee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.011092] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0305060-73fd-42d5-8b45-3d376ec96df8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.034131] env[62383]: DEBUG nova.compute.provider_tree [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.106889] env[62383]: DEBUG nova.compute.manager [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 534.327172] env[62383]: DEBUG nova.compute.manager [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 534.538073] env[62383]: DEBUG nova.scheduler.client.report [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 534.636486] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.847890] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.045190] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: 
held 2.182s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.048216] env[62383]: DEBUG nova.compute.manager [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 535.051417] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.189s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.053626] env[62383]: INFO nova.compute.claims [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 535.555234] env[62383]: DEBUG nova.compute.utils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 535.564601] env[62383]: DEBUG nova.compute.manager [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 535.564601] env[62383]: DEBUG nova.network.neutron [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 535.646620] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.646999] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.850914] env[62383]: DEBUG nova.policy [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f00da7582832443e9052a6cb34ddc2ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0319810679c349b89d4129e7964d2a72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 536.066717] env[62383]: DEBUG nova.compute.manager [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 536.150697] env[62383]: DEBUG nova.compute.manager [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 536.309490] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27eb7f7-7567-4dc6-9568-c62b90c4ba96 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.318845] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19330a2a-0c99-415d-8d1f-76e4d405d9ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.355227] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3ac788-4066-4d57-9114-814c3ff488c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.362723] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3a7b9c-1a3c-409a-8bd2-03fdc8e44283 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.376731] env[62383]: DEBUG nova.compute.provider_tree [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 536.390705] env[62383]: DEBUG nova.network.neutron [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Successfully created port: 1c906cf8-6b21-4337-af7e-2bd00715405e {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 536.624399] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquiring lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.624712] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.681134] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.881295] env[62383]: DEBUG nova.scheduler.client.report [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] 
Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 537.082989] env[62383]: DEBUG nova.compute.manager [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 537.129134] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 537.129495] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 537.129990] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 537.131223] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 537.131223] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 537.131223] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 537.133213] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 537.133213] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 537.134865] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 537.134865] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 537.134865] env[62383]: DEBUG nova.virt.hardware [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 537.134865] env[62383]: DEBUG nova.compute.manager [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 537.143079] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8030be3f-e220-4d8c-b76e-ef0930bd7bdf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.159826] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9031e327-2acc-4ca8-a284-eed43f2b03a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.184829] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330358b6-c82b-48d4-9f3d-0b7dabc84d6d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.386092] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.386707] env[62383]: DEBUG nova.compute.manager [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 537.391624] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.755s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.392982] env[62383]: INFO nova.compute.claims [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 537.680649] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.898252] env[62383]: DEBUG nova.compute.utils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 537.902346] env[62383]: DEBUG nova.compute.manager [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 537.902346] env[62383]: DEBUG nova.network.neutron [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 537.984634] env[62383]: DEBUG nova.policy [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c59d21fc407f49acb7752b6053101ca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe96856d7cbb433981c53498b15cfef3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 538.405097] env[62383]: DEBUG nova.compute.manager [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 538.545618] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d363663f-1cf7-4257-ac54-a3cabab72910 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.554672] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f359757-9f3b-42c6-b7da-c9759d945844 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.595912] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e934b2e-bcad-4ffa-9821-6c84d4c03432 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.602874] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4ce0d8-3b40-4687-b39d-d761bad2b59d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.617299] env[62383]: DEBUG nova.compute.provider_tree [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 538.988483] env[62383]: DEBUG nova.network.neutron [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Successfully created port: f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 539.072787] env[62383]: DEBUG nova.network.neutron [None 
req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Successfully updated port: 1c906cf8-6b21-4337-af7e-2bd00715405e {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 539.120605] env[62383]: DEBUG nova.scheduler.client.report [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 539.419570] env[62383]: DEBUG nova.compute.manager [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 539.454034] env[62383]: DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 539.454034] env[62383]: DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.454034] env[62383]: DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 539.454253] env[62383]: DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 539.454253] env[62383]: 
DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 539.454253] env[62383]: DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 539.454253] env[62383]: DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 539.454253] env[62383]: DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 539.454390] env[62383]: DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 539.454390] env[62383]: DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 539.454390] env[62383]: DEBUG nova.virt.hardware [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 539.457161] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd2f813-753f-4a2d-8772-6d8197730cdc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.463558] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c4809d-bf69-4617-bfb3-e501b1dce01e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.575478] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "refresh_cache-7b8c8c12-fcf3-4b54-ae22-3aead1344803" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.575867] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 
tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquired lock "refresh_cache-7b8c8c12-fcf3-4b54-ae22-3aead1344803" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.576263] env[62383]: DEBUG nova.network.neutron [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 539.627345] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.628270] env[62383]: DEBUG nova.compute.manager [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 539.634247] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.784s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.634247] env[62383]: INFO nova.compute.claims [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 540.092288] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.092761] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.134997] env[62383]: DEBUG nova.compute.utils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 540.137942] env[62383]: DEBUG nova.compute.manager [None 
req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 540.138054] env[62383]: DEBUG nova.network.neutron [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 540.195183] env[62383]: DEBUG nova.network.neutron [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 540.425998] env[62383]: DEBUG nova.policy [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b4bb2a0ba90e4282bcdb3c3093052f64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0162430e5f0c4e27a29777b764454fe6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 540.598087] env[62383]: DEBUG nova.compute.manager [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 540.642322] env[62383]: DEBUG nova.compute.manager [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 540.686018] env[62383]: DEBUG nova.network.neutron [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Updating instance_info_cache with network_info: [{"id": "1c906cf8-6b21-4337-af7e-2bd00715405e", "address": "fa:16:3e:c4:e6:79", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c906cf8-6b", "ovs_interfaceid": "1c906cf8-6b21-4337-af7e-2bd00715405e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.794749] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283ebd93-ff1b-4402-9e19-ef2a32aed41a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.803014] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2cda9a-b064-4580-ae85-617a1da4ddc3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.832542] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2311e61c-d523-4e5d-a52f-368b07c0a545 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.839886] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f036ef-e0a1-4323-a31a-722fdbfbcf5b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.854035] env[62383]: DEBUG nova.compute.provider_tree [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 541.119194] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.188183] env[62383]: DEBUG 
oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Releasing lock "refresh_cache-7b8c8c12-fcf3-4b54-ae22-3aead1344803" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.189213] env[62383]: DEBUG nova.compute.manager [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Instance network_info: |[{"id": "1c906cf8-6b21-4337-af7e-2bd00715405e", "address": "fa:16:3e:c4:e6:79", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c906cf8-6b", "ovs_interfaceid": "1c906cf8-6b21-4337-af7e-2bd00715405e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 541.189713] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:e6:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c906cf8-6b21-4337-af7e-2bd00715405e', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 541.206084] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 541.207708] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-365ff221-43c9-4411-bd5f-39ca0a56476c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.221892] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Created folder: OpenStack in parent group-v4. 
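[editor annotation, not part of the captured log] The "Instance VIF info" entry above shows the neutron port for instance 7b8c8c12-fcf3-4b54-ae22-3aead1344803 being translated into a VMware-style VIF description (OpaqueNetwork backed by an NSX logical switch, vmxnet3 model). The sketch below is a minimal, hypothetical illustration of that mapping using the exact values from the log; the function name and field selection are assumptions for illustration and are not the nova.virt.vmwareapi implementation.

```python
# Illustrative only: map one neutron network_info entry (as logged above)
# to the VMware-style VIF info dict that vmops reports. Names are assumed.
def network_info_to_vif_info(vif):
    """Translate a neutron VIF dict into a VMware VIF info dict."""
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],   # e.g. 'br-int'
        "mac_address": vif["address"],              # e.g. 'fa:16:3e:c4:e6:79'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }


if __name__ == "__main__":
    # Sample values taken directly from the log entry above.
    sample_vif = {
        "id": "1c906cf8-6b21-4337-af7e-2bd00715405e",
        "address": "fa:16:3e:c4:e6:79",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb"},
    }
    print(network_info_to_vif_info(sample_vif))
```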
[ 541.222150] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Creating folder: Project (0319810679c349b89d4129e7964d2a72). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 541.222405] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31d4be31-845a-4f0c-b2b7-93227dc68360 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.233117] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Created folder: Project (0319810679c349b89d4129e7964d2a72) in parent group-v496304. [ 541.233320] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Creating folder: Instances. Parent ref: group-v496305. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 541.233759] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abde8865-c90d-4d56-ba18-7c4a9ee53e81 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.243892] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Created folder: Instances in parent group-v496305. [ 541.244362] env[62383]: DEBUG oslo.service.loopingcall [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 541.244362] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 541.244538] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-863ba15b-fd54-4b87-b19f-795fbf584af4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.259360] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.260074] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.260074] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 541.260074] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Rebuilding the list of instances to heal {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 541.265803] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 541.265803] env[62383]: value = "task-2450914" [ 541.265803] env[62383]: _type = "Task" [ 541.265803] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.276478] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450914, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.359307] env[62383]: DEBUG nova.scheduler.client.report [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 541.655661] env[62383]: DEBUG nova.compute.manager [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 541.685415] env[62383]: DEBUG nova.virt.hardware [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 541.687020] env[62383]: DEBUG nova.virt.hardware [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 541.687020] env[62383]: DEBUG nova.virt.hardware [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 541.687020] env[62383]: DEBUG nova.virt.hardware [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 541.687020] env[62383]: DEBUG nova.virt.hardware [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 541.687020] env[62383]: DEBUG nova.virt.hardware [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 541.690028] env[62383]: DEBUG nova.virt.hardware [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 541.690028] env[62383]: DEBUG nova.virt.hardware [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 541.690028] env[62383]: DEBUG nova.virt.hardware [None 
req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 541.690028] env[62383]: DEBUG nova.virt.hardware [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 541.690028] env[62383]: DEBUG nova.virt.hardware [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 541.690405] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbac940-36a8-40b1-8247-d647158383b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.703030] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8f11db-ca5b-49f4-b8d2-b78a5abafff1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.763137] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Skipping network cache update for instance because it is Building. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 541.763335] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Skipping network cache update for instance because it is Building. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 541.763471] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Skipping network cache update for instance because it is Building. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 541.763594] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Skipping network cache update for instance because it is Building. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 541.763714] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Didn't find any instances for network info cache update. 
{{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 541.763948] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.764182] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.764369] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.764553] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.766296] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.767175] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.767387] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 541.771982] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 541.779385] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450914, 'name': CreateVM_Task, 'duration_secs': 0.360564} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.779544] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 541.803068] env[62383]: DEBUG oslo_vmware.service [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07c8e96e-ec63-4740-8b60-0dc09e075106 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.817196] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.817196] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.817645] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 541.817925] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df8c5769-9feb-4b8c-9a8c-2ff0f1b10c7e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.825197] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 541.825197] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52098320-d07c-74eb-2477-56a5815afc1e" [ 541.825197] env[62383]: _type = "Task" [ 541.825197] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.839411] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52098320-d07c-74eb-2477-56a5815afc1e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.867406] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.235s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.868036] env[62383]: DEBUG nova.compute.manager [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 541.870889] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.190s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.872468] env[62383]: INFO nova.compute.claims [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 542.244146] env[62383]: DEBUG nova.network.neutron [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Successfully created port: 7e2cc599-2a7f-4045-b958-4141268a4ab9 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 542.276594] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.340840] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.341156] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 542.341408] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.341553] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.342019] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 542.342384] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e59da59-e34d-47fc-8780-62090ae8fb82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.360268] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 542.360268] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 542.360268] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155edf73-9128-44d0-86db-679a9fc59bd0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.370022] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8a773ef-83df-4c5f-b9f9-8fda62d7cebe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.376557] env[62383]: DEBUG nova.compute.utils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 542.380201] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 542.380201] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52cb4a5d-6e9f-0934-b919-a9fc244d03a8" [ 542.380201] env[62383]: _type = "Task" [ 542.380201] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.383219] env[62383]: DEBUG nova.compute.manager [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 542.383219] env[62383]: DEBUG nova.network.neutron [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 542.396659] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Preparing fetch location {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 542.397141] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Creating directory with path [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 542.397492] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6c60b17-0464-42c3-8afc-91ad08cc7c2a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.417796] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Created directory with path [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 542.418113] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Fetch image to [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 542.418415] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Downloading image file data cac3b430-a1d5-4ad1-92ec-34c2261779a8 to [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk on the data store datastore2 {{(pid=62383) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 542.419798] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38c2eb7-ec0a-4849-af65-376efb11e833 {{(pid=62383) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.430541] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbc8775-99f6-4fa9-ae35-aceac5f108a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.438765] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea24861b-b4fb-41b4-bbeb-ed4fc5b9f9e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.489347] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025225df-2565-459f-a73c-1f2f6eae6426 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.493535] env[62383]: DEBUG nova.compute.manager [req-0782970a-5e0a-436c-bc07-81a45b4c392b req-569fb432-0edd-42e7-bb0b-e089efbfef3f service nova] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Received event network-vif-plugged-1c906cf8-6b21-4337-af7e-2bd00715405e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 542.495384] env[62383]: DEBUG oslo_concurrency.lockutils [req-0782970a-5e0a-436c-bc07-81a45b4c392b req-569fb432-0edd-42e7-bb0b-e089efbfef3f service nova] Acquiring lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.495384] env[62383]: DEBUG oslo_concurrency.lockutils [req-0782970a-5e0a-436c-bc07-81a45b4c392b req-569fb432-0edd-42e7-bb0b-e089efbfef3f service nova] Lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.495384] env[62383]: DEBUG oslo_concurrency.lockutils [req-0782970a-5e0a-436c-bc07-81a45b4c392b req-569fb432-0edd-42e7-bb0b-e089efbfef3f service nova] Lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.495384] env[62383]: DEBUG nova.compute.manager [req-0782970a-5e0a-436c-bc07-81a45b4c392b req-569fb432-0edd-42e7-bb0b-e089efbfef3f service nova] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] No waiting events found dispatching network-vif-plugged-1c906cf8-6b21-4337-af7e-2bd00715405e {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 542.495384] env[62383]: WARNING nova.compute.manager [req-0782970a-5e0a-436c-bc07-81a45b4c392b req-569fb432-0edd-42e7-bb0b-e089efbfef3f service nova] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Received unexpected event network-vif-plugged-1c906cf8-6b21-4337-af7e-2bd00715405e for instance with vm_state building and task_state spawning. 
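[editor annotation, not part of the captured log] The WARNING above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning") reflects the usual race between Neutron's port notification and the compute service registering a waiter for that event: if no waiter exists yet, the event is logged as unexpected and dropped. The sketch below shows that prepare/pop pattern in a minimal, self-contained form; all class and function names are illustrative assumptions, not Nova's actual InstanceEvents API.

```python
# Minimal sketch of a "prepare waiters, pop on arrival, warn if unexpected"
# pattern, under the assumptions stated above.
import threading
from collections import defaultdict


class InstanceEventWaiters:
    def __init__(self):
        self._lock = threading.Lock()
        # instance uuid -> {event name: threading.Event}
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before starting the operation."""
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        """Deliver an incoming event; return the waiter, or None if none was registered."""
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)


waiters = InstanceEventWaiters()


def on_external_event(instance_uuid, event_name):
    waiter = waiters.pop(instance_uuid, event_name)
    if waiter is None:
        # Mirrors the WARNING in the log: the event arrived before any waiter existed.
        print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
    else:
        waiter.set()


# A network-vif-plugged notification arriving before prepare() is "unexpected":
on_external_event("7b8c8c12-fcf3-4b54-ae22-3aead1344803",
                  "network-vif-plugged-1c906cf8-6b21-4337-af7e-2bd00715405e")
```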
[ 542.500053] env[62383]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fb3b1907-a975-4322-a738-67cd3bffa402 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.532731] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Downloading image file data cac3b430-a1d5-4ad1-92ec-34c2261779a8 to the data store datastore2 {{(pid=62383) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 542.575328] env[62383]: DEBUG nova.network.neutron [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Successfully updated port: f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 542.604882] env[62383]: DEBUG nova.policy [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6cdd680c92c4d109b69ceb56cfe7841', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17cb0d24b1834501a56eb07ff7593774', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 542.617442] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62383) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 542.881746] env[62383]: DEBUG nova.compute.manager [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 543.080415] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "refresh_cache-8e911bad-5408-4588-9865-912ce4457d34" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.080415] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquired lock "refresh_cache-8e911bad-5408-4588-9865-912ce4457d34" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.080531] env[62383]: DEBUG nova.network.neutron [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 543.102778] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcdfcff-592f-4b78-b063-df89120385b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.121460] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca74cbb4-4111-47be-8259-004e70a8c5af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.165279] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514de9d6-390c-481e-b34f-aea3b5b6ca62 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.178848] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec8abc9-100d-4c90-8e50-e28cdb99e9c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.199073] env[62383]: DEBUG nova.compute.provider_tree [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.436467] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Completed reading data from the image iterator. {{(pid=62383) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 543.436467] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 543.495340] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Downloaded image file data cac3b430-a1d5-4ad1-92ec-34c2261779a8 to vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk on the data store datastore2 {{(pid=62383) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 543.497649] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Caching image {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 543.497649] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Copying Virtual Disk [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk to [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 543.497649] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc4476cb-6a96-4557-bead-a4841f85079f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.505641] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 543.505641] env[62383]: value = "task-2450915" [ 543.505641] env[62383]: _type = "Task" [ 543.505641] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.515027] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450915, 'name': CopyVirtualDisk_Task} progress is 0%. 
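
CopyVirtualDisk_Task (task-2450915) is created and then polled until it reports 100%, the same wait-then-poll shape used for every vCenter task in this log. Here is a simplified stand-in for that loop; read_task_info, the poll interval, and the timeout are assumptions, while the real driver goes through oslo_vmware.api's task-waiting machinery.

    # Simplified poll loop for a vCenter-style task object; not the
    # oslo.vmware implementation. 'read_task_info' is an assumed helper that
    # returns an object with .state ('running'/'success'/'error'), .progress
    # and .error attributes.
    import time

    def wait_for_task(read_task_info, task_ref, poll_interval=0.5, timeout=300):
        """Poll a task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = read_task_info(task_ref)      # progress 0% ... 100% in the log
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"task {task_ref} failed: {info.error}")
            time.sleep(poll_interval)
        raise TimeoutError(f"task {task_ref} did not finish within {timeout}s")
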
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.595037] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.595140] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.691768] env[62383]: DEBUG nova.network.neutron [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 543.705809] env[62383]: DEBUG nova.scheduler.client.report [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 543.786018] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "da16da02-25ab-46f9-9070-9fdde0b3a75e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.786327] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "da16da02-25ab-46f9-9070-9fdde0b3a75e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.902179] env[62383]: DEBUG nova.compute.manager [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 543.937022] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 543.938396] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 543.938396] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 543.938558] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 543.938726] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 543.938889] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 543.939113] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 543.939278] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 543.939456] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 543.939614] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 543.943194] env[62383]: DEBUG nova.virt.hardware [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 543.943611] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67cb2c8-82bf-4a8e-9fb3-de0c4d4f1250 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.956572] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99aff387-42d0-4bb4-84b7-99f05842ddbc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.018092] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450915, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.021783] env[62383]: DEBUG nova.network.neutron [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Successfully created port: e22f03a4-9d5c-4c58-ab19-bfd809d30f2b {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 544.100197] env[62383]: DEBUG nova.compute.manager [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 544.210076] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.211166] env[62383]: DEBUG nova.compute.manager [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Start building networks asynchronously for instance. 
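
The nova.virt.hardware entries walk topology selection for the m1.nano flavor: flavor and image limits of 0 mean "unconstrained" (treated as 65536 per dimension), and for a single vCPU the only factorisation is 1 socket x 1 core x 1 thread, hence "Got 1 possible topologies". A small sketch of that enumeration under those assumptions follows; the real logic lives in nova/virt/hardware.py and also applies preferences and sorting.

    # Enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count and that respect the per-dimension maxima. Sketch, not nova code.
    from collections import namedtuple

    Topology = namedtuple("Topology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topologies.append(Topology(sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]
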
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 544.215671] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.535s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.217599] env[62383]: INFO nova.compute.claims [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 544.289185] env[62383]: DEBUG nova.compute.manager [req-017d585e-c907-4c8f-a4de-a46a104a3fed req-c5cedcac-220d-4ea0-ad64-7b3f2d4ca3de service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Received event network-vif-plugged-f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 544.289425] env[62383]: DEBUG oslo_concurrency.lockutils [req-017d585e-c907-4c8f-a4de-a46a104a3fed req-c5cedcac-220d-4ea0-ad64-7b3f2d4ca3de service nova] Acquiring lock "8e911bad-5408-4588-9865-912ce4457d34-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.289634] env[62383]: DEBUG oslo_concurrency.lockutils [req-017d585e-c907-4c8f-a4de-a46a104a3fed req-c5cedcac-220d-4ea0-ad64-7b3f2d4ca3de service nova] Lock "8e911bad-5408-4588-9865-912ce4457d34-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.289833] env[62383]: DEBUG oslo_concurrency.lockutils [req-017d585e-c907-4c8f-a4de-a46a104a3fed req-c5cedcac-220d-4ea0-ad64-7b3f2d4ca3de service nova] Lock "8e911bad-5408-4588-9865-912ce4457d34-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.290182] env[62383]: DEBUG nova.compute.manager [req-017d585e-c907-4c8f-a4de-a46a104a3fed req-c5cedcac-220d-4ea0-ad64-7b3f2d4ca3de service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] No waiting events found dispatching network-vif-plugged-f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 544.290864] env[62383]: WARNING nova.compute.manager [req-017d585e-c907-4c8f-a4de-a46a104a3fed req-c5cedcac-220d-4ea0-ad64-7b3f2d4ca3de service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Received unexpected event network-vif-plugged-f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e for instance with vm_state building and task_state spawning. [ 544.290864] env[62383]: DEBUG nova.compute.manager [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 544.390127] env[62383]: DEBUG nova.network.neutron [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Updating instance_info_cache with network_info: [{"id": "f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e", "address": "fa:16:3e:d1:3e:92", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fa0d52-c6", "ovs_interfaceid": "f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.523923] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450915, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663255} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 544.525014] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Copied Virtual Disk [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk to [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 544.525014] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Deleting the datastore file [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 544.525014] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37e451a9-a2fd-478b-ae77-5082ad96dd35 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.535812] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 544.535812] env[62383]: value = "task-2450916" [ 544.535812] env[62383]: _type = "Task" [ 544.535812] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.545622] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450916, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.629521] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.731709] env[62383]: DEBUG nova.compute.utils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.733705] env[62383]: DEBUG nova.compute.manager [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 544.733705] env[62383]: DEBUG nova.network.neutron [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 544.816592] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.894868] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Releasing lock "refresh_cache-8e911bad-5408-4588-9865-912ce4457d34" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.895437] env[62383]: DEBUG nova.compute.manager [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Instance network_info: |[{"id": "f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e", "address": "fa:16:3e:d1:3e:92", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fa0d52-c6", "ovs_interfaceid": "f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 544.896032] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:3e:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 544.905921] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3212ed-e24a-473f-863c-71d21b74c954 
tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Creating folder: Project (fe96856d7cbb433981c53498b15cfef3). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 544.906606] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d373196-8843-4b53-b1bf-0356e795e1b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.920825] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Created folder: Project (fe96856d7cbb433981c53498b15cfef3) in parent group-v496304. [ 544.920825] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Creating folder: Instances. Parent ref: group-v496308. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 544.920825] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-015c6ca5-81de-42ee-83f8-863ebd58bccc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.935873] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Created folder: Instances in parent group-v496308. [ 544.936667] env[62383]: DEBUG oslo.service.loopingcall [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 544.936667] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 544.936667] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa52836f-0bbe-4c33-8f2c-00dba7635140 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.964625] env[62383]: DEBUG nova.policy [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d374b5a04f94016b0f5aa198b02b40b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8d1b45dd8d74bf9a01173d57990d06b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 544.975335] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 544.975335] env[62383]: value = "task-2450919" [ 544.975335] env[62383]: _type = "Task" [ 544.975335] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.983950] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450919, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.044514] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450916, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02325} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.044857] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 545.045093] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Moving file from [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520/cac3b430-a1d5-4ad1-92ec-34c2261779a8 to [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8. {{(pid=62383) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 545.045344] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-f7bb2a25-db73-4fc7-acdf-9efa7856115e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.055708] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 545.055708] env[62383]: value = "task-2450920" [ 545.055708] env[62383]: _type = "Task" [ 545.055708] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.065140] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450920, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.244561] env[62383]: DEBUG nova.compute.manager [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Start building block device mappings for instance. 
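
Once the copy finishes, the temporary tmp-sparse.vmdk is deleted and the converted disk is moved from the vmware_temp/00d7395f-... working area into devstack-image-cache_base, so later boots of the same image (see the SearchDatastore_Task entries that follow) find it already cached; the leftover working directory is then cleaned up. Below is a local-filesystem analogue of that promote-to-cache sequence, with illustrative paths and helper names; on the real datastore these are vCenter FileManager Delete/Move tasks.

    # Local-filesystem analogue of the fetch -> convert -> promote-to-cache
    # flow recorded above. Purely illustrative.
    import shutil
    from pathlib import Path

    def promote_to_cache(tmp_root: Path, image_id: str, cache_base: Path) -> Path:
        """Move the converted disk out of the temp area into the shared cache."""
        work_dir = tmp_root / image_id
        (work_dir / "tmp-sparse.vmdk").unlink(missing_ok=True)   # DeleteDatastoreFile_Task
        cache_base.mkdir(parents=True, exist_ok=True)
        target = cache_base / image_id
        shutil.move(str(work_dir), str(target))                  # MoveDatastoreFile_Task
        shutil.rmtree(tmp_root, ignore_errors=True)              # clean up vmware_temp/<uuid>
        return target / f"{image_id}.vmdk"
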
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 545.457244] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de01f25-d9ce-4014-b407-e8c0a611e258 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.467425] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f33d88e-d304-48dc-84d9-ccc9e6bf7a73 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.511305] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a73f92-8419-4e9a-8a08-aa91385fcd23 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.525385] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2ca136-e466-4618-be7e-667473e86caa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.530888] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450919, 'name': CreateVM_Task, 'duration_secs': 0.400754} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.531171] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 545.532288] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.532500] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.532914] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 545.533323] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5751d7fb-afa1-49c2-ba77-c2ab8d45a87a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.545585] env[62383]: DEBUG nova.compute.provider_tree [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory 
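
Before reusing the cached disk, the driver takes a lock named after the image-cache path and only then runs SearchDatastore_Task, so concurrent builds of the same image serialise: the first one downloads and promotes the image, the rest wait and then find it present. A stdlib sketch of that check-under-lock pattern follows; Nova uses oslo.concurrency's lockutils for the same idea, and the lock name below simply mirrors the one in the log.

    # Check-then-fetch under a per-image lock, the pattern behind the
    # Acquiring/Acquired/Releasing lock entries for the image-cache path.
    import threading
    from collections import defaultdict

    _locks = defaultdict(threading.Lock)

    def ensure_cached(image_id, cache, fetch):
        """Return the cached artifact for image_id, fetching it at most once."""
        lock_name = f"[datastore2] devstack-image-cache_base/{image_id}"
        with _locks[lock_name]:                  # serialise builds of the same image
            if image_id not in cache:            # SearchDatastore_Task analogue
                cache[image_id] = fetch(image_id)   # download + promote on a miss
            return cache[image_id]
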
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.549640] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 545.549640] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52163568-d251-8f9d-0425-2adffa609e71" [ 545.549640] env[62383]: _type = "Task" [ 545.549640] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.558462] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52163568-d251-8f9d-0425-2adffa609e71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.568344] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450920, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.026486} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.570116] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] File moved {{(pid=62383) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 545.570116] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Cleaning up location [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 545.570116] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Deleting the datastore file [datastore2] vmware_temp/00d7395f-6326-4273-bd92-d50ff3a38520 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 545.570116] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f879f8ca-938b-474b-8e22-84dff9ee9b01 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.576355] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 545.576355] env[62383]: value = "task-2450921" [ 545.576355] env[62383]: _type = "Task" [ 545.576355] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.587971] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450921, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.883335] env[62383]: DEBUG nova.network.neutron [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Successfully updated port: 7e2cc599-2a7f-4045-b958-4141268a4ab9 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 546.048864] env[62383]: DEBUG nova.scheduler.client.report [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 546.067770] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52163568-d251-8f9d-0425-2adffa609e71, 'name': SearchDatastore_Task, 'duration_secs': 0.009306} completed successfully. 
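
The scheduler report client recomputes the provider's inventory (VCPU, MEMORY_MB and DISK_GB with totals, reserved amounts and allocation ratios) on each pass and compares it with what the ProviderTree already holds; "Inventory has not changed" means the PUT to placement is skipped. A short sketch of that only-update-on-change check, with the inventory dict copied from the entry above and the helper name assumed.

    # Skip the placement update when the recomputed inventory matches what the
    # provider tree already has. Sketch only; the real check lives in
    # nova/compute/provider_tree.py and the report client.
    def inventory_changed(provider_tree_inventory, new_inventory):
        """True only when a PUT to placement is actually needed."""
        return provider_tree_inventory != new_inventory

    current = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 146,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    print(inventory_changed(current, dict(current)))   # False -> skip the update
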
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.068758] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.068993] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 546.069217] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.087555] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025422} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.088170] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 546.092323] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8560c957-a626-4721-a4c1-d3a113b80474 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.099184] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 546.099184] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52076e3e-54eb-3cbb-0312-833099d41ab1" [ 546.099184] env[62383]: _type = "Task" [ 546.099184] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.112469] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52076e3e-54eb-3cbb-0312-833099d41ab1, 'name': SearchDatastore_Task, 'duration_secs': 0.00896} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.113457] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.113457] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 7b8c8c12-fcf3-4b54-ae22-3aead1344803/7b8c8c12-fcf3-4b54-ae22-3aead1344803.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 546.113457] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.113612] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 546.113790] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09e85c4f-d603-4a0e-b72f-10dc589f7bc2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.117437] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c192095b-776c-49a7-9aa7-db74190a4c8f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.122724] env[62383]: DEBUG nova.compute.manager [req-7bddd950-1a0c-4bce-be5f-bf34a1625bd4 req-9e18a9f3-0c16-4c65-8e9d-c192b2ae6003 service nova] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Received event network-changed-1c906cf8-6b21-4337-af7e-2bd00715405e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 546.123107] env[62383]: DEBUG nova.compute.manager [req-7bddd950-1a0c-4bce-be5f-bf34a1625bd4 req-9e18a9f3-0c16-4c65-8e9d-c192b2ae6003 service nova] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Refreshing instance network info cache due to event network-changed-1c906cf8-6b21-4337-af7e-2bd00715405e. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 546.125020] env[62383]: DEBUG oslo_concurrency.lockutils [req-7bddd950-1a0c-4bce-be5f-bf34a1625bd4 req-9e18a9f3-0c16-4c65-8e9d-c192b2ae6003 service nova] Acquiring lock "refresh_cache-7b8c8c12-fcf3-4b54-ae22-3aead1344803" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.125020] env[62383]: DEBUG oslo_concurrency.lockutils [req-7bddd950-1a0c-4bce-be5f-bf34a1625bd4 req-9e18a9f3-0c16-4c65-8e9d-c192b2ae6003 service nova] Acquired lock "refresh_cache-7b8c8c12-fcf3-4b54-ae22-3aead1344803" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.125020] env[62383]: DEBUG nova.network.neutron [req-7bddd950-1a0c-4bce-be5f-bf34a1625bd4 req-9e18a9f3-0c16-4c65-8e9d-c192b2ae6003 service nova] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Refreshing network info cache for port 1c906cf8-6b21-4337-af7e-2bd00715405e {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 546.128514] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 546.128749] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 546.129935] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 546.129935] env[62383]: value = "task-2450922" [ 546.129935] env[62383]: _type = "Task" [ 546.129935] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.130471] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82a71885-72b5-4c97-964c-817548f8e506 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.142922] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 546.142922] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e32f46-aa0a-e6cf-3bbc-090bcbbe3922" [ 546.142922] env[62383]: _type = "Task" [ 546.142922] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.146094] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450922, 'name': CopyVirtualDisk_Task} progress is 0%. 
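
Neutron notifies Nova of port activity through external instance events: an expected network-vif-plugged event pops a waiter that a spawning thread is blocked on, an unexpected one is logged as a warning (as for 8e911bad-... above), and a network-changed event makes the compute manager refresh the instance's network info cache under its refresh_cache-<uuid> lock, which is what these entries show for port 1c906cf8-6b21-4337-af7e-2bd00715405e. Here is a minimal dispatch sketch under those assumptions; names and argument shapes are illustrative, not ComputeManager's actual signature.

    # Minimal router for neutron "external instance events". 'waiting_events'
    # maps (event_name, port_id) -> threading.Event set up by the spawning
    # thread; 'refresh_cache' is an assumed callback.
    def handle_external_event(instance_uuid, event_name, tag, waiting_events, refresh_cache):
        """Unblock a waiter, refresh the network info cache, or warn."""
        if event_name == "network-changed":
            refresh_cache(instance_uuid, port_id=tag)   # rebuild instance_info_cache
            return
        waiter = waiting_events.pop((event_name, tag), None)
        if waiter is not None:
            waiter.set()                                # e.g. a spawn waiting for vif-plugged
        else:
            print(f"WARNING: unexpected event {event_name}-{tag} "
                  f"for instance {instance_uuid}")
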
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.155158] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e32f46-aa0a-e6cf-3bbc-090bcbbe3922, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.264025] env[62383]: DEBUG nova.compute.manager [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 546.308452] env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 546.310173] env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.310227] env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 546.314019] env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.314019] env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 546.314019] env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 546.314019] 
env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 546.314019] env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 546.314299] env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 546.314299] env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 546.314299] env[62383]: DEBUG nova.virt.hardware [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 546.314299] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073e86e3-538d-4afa-adc4-60096b653c64 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.325948] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1be80a-514a-4011-9da0-0010bb894dde {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.386569] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquiring lock "refresh_cache-8a2b209c-423c-446c-a769-f7d7820d46da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.386722] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquired lock "refresh_cache-8a2b209c-423c-446c-a769-f7d7820d46da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.387414] env[62383]: DEBUG nova.network.neutron [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 546.494281] env[62383]: DEBUG nova.network.neutron [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 
tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Successfully updated port: e22f03a4-9d5c-4c58-ab19-bfd809d30f2b {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 546.562559] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.563193] env[62383]: DEBUG nova.compute.manager [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 546.568242] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.447s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.570490] env[62383]: INFO nova.compute.claims [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 546.648498] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469498} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.651278] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 7b8c8c12-fcf3-4b54-ae22-3aead1344803/7b8c8c12-fcf3-4b54-ae22-3aead1344803.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 546.651504] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 546.652183] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a95afe83-744e-4a4c-a8f0-da6fcca7255b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.660869] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e32f46-aa0a-e6cf-3bbc-090bcbbe3922, 'name': SearchDatastore_Task, 'duration_secs': 0.015658} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.662941] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 546.662941] env[62383]: value = "task-2450923" [ 546.662941] env[62383]: _type = "Task" [ 546.662941] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.663221] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62a08258-129f-4063-b5ef-c8a6ce628e53 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.673560] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 546.673560] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b381a6-e44d-2fe5-b6f3-d435a769d28f" [ 546.673560] env[62383]: _type = "Task" [ 546.673560] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.679809] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450923, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.691594] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b381a6-e44d-2fe5-b6f3-d435a769d28f, 'name': SearchDatastore_Task, 'duration_secs': 0.008135} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 546.691879] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.692151] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8e911bad-5408-4588-9865-912ce4457d34/8e911bad-5408-4588-9865-912ce4457d34.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 546.692497] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-732d1e34-7229-47f8-a59e-29dd3edcd2f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.701118] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 546.701118] env[62383]: value = "task-2450924" [ 546.701118] env[62383]: _type = "Task" [ 546.701118] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 546.712569] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2450924, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 546.808490] env[62383]: DEBUG nova.network.neutron [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Successfully created port: 5205d6ef-091d-4460-bd6c-3b1c5873c3ea {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.998818] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquiring lock "refresh_cache-ab338058-13c8-4df9-ba55-fabe1952557d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.998818] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquired lock "refresh_cache-ab338058-13c8-4df9-ba55-fabe1952557d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.998818] env[62383]: DEBUG nova.network.neutron [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 547.024922] env[62383]: DEBUG nova.network.neutron [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.062577] env[62383]: DEBUG nova.network.neutron [req-7bddd950-1a0c-4bce-be5f-bf34a1625bd4 req-9e18a9f3-0c16-4c65-8e9d-c192b2ae6003 service nova] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Updated VIF entry in instance network info cache for port 1c906cf8-6b21-4337-af7e-2bd00715405e. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 547.062577] env[62383]: DEBUG nova.network.neutron [req-7bddd950-1a0c-4bce-be5f-bf34a1625bd4 req-9e18a9f3-0c16-4c65-8e9d-c192b2ae6003 service nova] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Updating instance_info_cache with network_info: [{"id": "1c906cf8-6b21-4337-af7e-2bd00715405e", "address": "fa:16:3e:c4:e6:79", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c906cf8-6b", "ovs_interfaceid": "1c906cf8-6b21-4337-af7e-2bd00715405e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.081518] env[62383]: DEBUG nova.compute.utils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 547.085542] env[62383]: DEBUG nova.compute.manager [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 547.085899] env[62383]: DEBUG nova.network.neutron [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 547.173731] env[62383]: DEBUG nova.policy [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '475ba82437a346789f01d535b1a79daf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ce3b0ac5656445fba697c05dcc53e70', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 547.181991] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450923, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068115} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.182265] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 547.183152] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e66a05-15ff-4255-92a4-41bb90061492 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.212772] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 7b8c8c12-fcf3-4b54-ae22-3aead1344803/7b8c8c12-fcf3-4b54-ae22-3aead1344803.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 547.215086] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc35630c-3279-4b8d-8e87-3758b1559878 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.238256] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2450924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465113} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.239449] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8e911bad-5408-4588-9865-912ce4457d34/8e911bad-5408-4588-9865-912ce4457d34.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 547.239743] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 547.240112] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 547.240112] env[62383]: value = "task-2450925" [ 547.240112] env[62383]: _type = "Task" [ 547.240112] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.240324] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d19ea6f6-69a5-41e6-8832-5356d674d093 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.250924] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450925, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.254551] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 547.254551] env[62383]: value = "task-2450926" [ 547.254551] env[62383]: _type = "Task" [ 547.254551] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.263286] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2450926, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.566795] env[62383]: DEBUG oslo_concurrency.lockutils [req-7bddd950-1a0c-4bce-be5f-bf34a1625bd4 req-9e18a9f3-0c16-4c65-8e9d-c192b2ae6003 service nova] Releasing lock "refresh_cache-7b8c8c12-fcf3-4b54-ae22-3aead1344803" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.586907] env[62383]: DEBUG nova.network.neutron [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.592049] env[62383]: DEBUG nova.compute.manager [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 547.760023] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450925, 'name': ReconfigVM_Task, 'duration_secs': 0.315143} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.760733] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 7b8c8c12-fcf3-4b54-ae22-3aead1344803/7b8c8c12-fcf3-4b54-ae22-3aead1344803.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 547.761814] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e727f7fe-8cc4-48d8-b1b5-fd0193289a3f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.771890] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2450926, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068669} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.773301] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 547.773758] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 547.773758] env[62383]: value = "task-2450927" [ 547.773758] env[62383]: _type = "Task" [ 547.773758] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.774648] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85632418-5850-43e5-b826-7c1b2126e7f2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.787684] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450927, 'name': Rename_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.812361] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] 8e911bad-5408-4588-9865-912ce4457d34/8e911bad-5408-4588-9865-912ce4457d34.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 547.813052] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5fac93d-0e5e-48f9-9871-9555eb79f174 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.839471] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 547.839471] env[62383]: value = "task-2450928" [ 547.839471] env[62383]: _type = "Task" [ 547.839471] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.850027] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2450928, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.868515] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f042ea-a994-4eff-ad32-8d8a71fced7f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.879855] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22acb01-eb12-4a70-bda0-de412e1cf1ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.920557] env[62383]: DEBUG nova.network.neutron [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Updating instance_info_cache with network_info: [{"id": "e22f03a4-9d5c-4c58-ab19-bfd809d30f2b", "address": "fa:16:3e:aa:f7:bd", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape22f03a4-9d", "ovs_interfaceid": "e22f03a4-9d5c-4c58-ab19-bfd809d30f2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.924574] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28306ef-42a7-49f0-8e42-4b4f3672a9c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.937018] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8743b6f5-13f4-4276-bf4d-38d83af694dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.940301] env[62383]: DEBUG nova.network.neutron [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Successfully created port: 1992c731-9b69-4b2d-8da4-293986dba848 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 547.953438] env[62383]: DEBUG nova.compute.provider_tree [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.075723] env[62383]: DEBUG 
nova.network.neutron [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Updating instance_info_cache with network_info: [{"id": "7e2cc599-2a7f-4045-b958-4141268a4ab9", "address": "fa:16:3e:3e:37:73", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e2cc599-2a", "ovs_interfaceid": "7e2cc599-2a7f-4045-b958-4141268a4ab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.229728] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquiring lock "571a5250-8655-4f30-b193-919affbc1bd8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.229969] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lock "571a5250-8655-4f30-b193-919affbc1bd8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.292024] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450927, 'name': Rename_Task, 'duration_secs': 0.152509} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.293124] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 548.293417] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55cbd70a-39c2-404b-9c30-585a74d6a6e2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.303118] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 548.303118] env[62383]: value = "task-2450929" [ 548.303118] env[62383]: _type = "Task" [ 548.303118] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.313115] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450929, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.352537] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2450928, 'name': ReconfigVM_Task, 'duration_secs': 0.297227} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.352537] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Reconfigured VM instance instance-00000002 to attach disk [datastore2] 8e911bad-5408-4588-9865-912ce4457d34/8e911bad-5408-4588-9865-912ce4457d34.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 548.357691] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c289ec2-429d-4cce-9c15-42cd0c958d4c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.365613] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 548.365613] env[62383]: value = "task-2450930" [ 548.365613] env[62383]: _type = "Task" [ 548.365613] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.379217] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2450930, 'name': Rename_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.430725] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Releasing lock "refresh_cache-ab338058-13c8-4df9-ba55-fabe1952557d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.431304] env[62383]: DEBUG nova.compute.manager [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Instance network_info: |[{"id": "e22f03a4-9d5c-4c58-ab19-bfd809d30f2b", "address": "fa:16:3e:aa:f7:bd", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape22f03a4-9d", "ovs_interfaceid": "e22f03a4-9d5c-4c58-ab19-bfd809d30f2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 548.431547] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:f7:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e22f03a4-9d5c-4c58-ab19-bfd809d30f2b', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 548.446724] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Creating folder: Project (17cb0d24b1834501a56eb07ff7593774). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 548.447653] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd32f71a-405a-41b7-9e26-2bf6f7876a2b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.457216] env[62383]: DEBUG nova.scheduler.client.report [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 548.466502] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Created folder: Project (17cb0d24b1834501a56eb07ff7593774) in parent group-v496304. [ 548.467285] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Creating folder: Instances. Parent ref: group-v496311. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 548.469832] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c27e6d1-1569-489a-8230-05ed85c2bd65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.483222] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Created folder: Instances in parent group-v496311. [ 548.483897] env[62383]: DEBUG oslo.service.loopingcall [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.483897] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 548.484087] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2dde9931-f76b-4d1d-9197-d94a2cfa8d12 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.507281] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 548.507281] env[62383]: value = "task-2450933" [ 548.507281] env[62383]: _type = "Task" [ 548.507281] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.517382] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450933, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.581642] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Releasing lock "refresh_cache-8a2b209c-423c-446c-a769-f7d7820d46da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.582203] env[62383]: DEBUG nova.compute.manager [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Instance network_info: |[{"id": "7e2cc599-2a7f-4045-b958-4141268a4ab9", "address": "fa:16:3e:3e:37:73", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e2cc599-2a", "ovs_interfaceid": "7e2cc599-2a7f-4045-b958-4141268a4ab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 548.582476] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:37:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e2cc599-2a7f-4045-b958-4141268a4ab9', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 548.592996] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Creating folder: Project (0162430e5f0c4e27a29777b764454fe6). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 548.593412] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2df8514d-39cb-4ae9-a9b0-0903d862a910 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.607533] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Created folder: Project (0162430e5f0c4e27a29777b764454fe6) in parent group-v496304. [ 548.607533] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Creating folder: Instances. Parent ref: group-v496314. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 548.608203] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9dc10ae-e649-4eab-add5-b2a8d0fbf449 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.615017] env[62383]: DEBUG nova.compute.manager [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 548.624341] env[62383]: DEBUG nova.compute.manager [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Received event network-changed-f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 548.624341] env[62383]: DEBUG nova.compute.manager [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Refreshing instance network info cache due to event network-changed-f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 548.624753] env[62383]: DEBUG oslo_concurrency.lockutils [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] Acquiring lock "refresh_cache-8e911bad-5408-4588-9865-912ce4457d34" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.625054] env[62383]: DEBUG oslo_concurrency.lockutils [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] Acquired lock "refresh_cache-8e911bad-5408-4588-9865-912ce4457d34" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.625342] env[62383]: DEBUG nova.network.neutron [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Refreshing network info cache for port f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 548.638988] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Created folder: Instances in parent group-v496314. [ 548.638988] env[62383]: DEBUG oslo.service.loopingcall [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.638988] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 548.638988] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-482b34db-b442-4ac9-b922-8e2a777b77a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.665044] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 548.665044] env[62383]: value = "task-2450936" [ 548.665044] env[62383]: _type = "Task" [ 548.665044] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.670572] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 548.670741] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 548.670881] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 548.671076] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 548.671599] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 548.671599] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 548.671599] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 548.671777] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
548.671867] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 548.673447] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 548.673447] env[62383]: DEBUG nova.virt.hardware [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 548.673447] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27528943-ebf2-4fb9-b4f6-187ed9e4c6cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.683519] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450936, 'name': CreateVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.689466] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dacc748-fbf2-4662-ab13-08f230763f93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.734922] env[62383]: DEBUG nova.compute.manager [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 548.826283] env[62383]: DEBUG oslo_vmware.api [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2450929, 'name': PowerOnVM_Task, 'duration_secs': 0.464185} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.826622] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 548.827082] env[62383]: INFO nova.compute.manager [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Took 11.74 seconds to spawn the instance on the hypervisor. 
[ 548.827386] env[62383]: DEBUG nova.compute.manager [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 548.828254] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a6439d-1554-474f-a07e-40dc7c45eab5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.882730] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2450930, 'name': Rename_Task, 'duration_secs': 0.131273} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.883025] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 548.883280] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8176bb6-9d5b-439a-b109-eeccc3b5a7fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.891628] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 548.891628] env[62383]: value = "task-2450937" [ 548.891628] env[62383]: _type = "Task" [ 548.891628] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.904196] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2450937, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.973760] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.973760] env[62383]: DEBUG nova.compute.manager [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 548.977182] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.701s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.977297] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.977515] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 548.977788] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.349s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.979631] env[62383]: INFO nova.compute.claims [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 548.984208] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d0ad40-0343-496c-abb5-3c786d3bf6be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.000190] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212c93d0-f4c9-4070-bece-e6ce04ab908e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.019922] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8580e8e-2c79-4de3-a9ce-c1b77c79587d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.031405] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fc33b9-aba9-4651-ad78-8a45c56c512a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.036014] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450933, 'name': CreateVM_Task, 'duration_secs': 0.333567} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.036786] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 549.038056] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.038510] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.038875] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 549.041466] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90961d8f-5770-4d47-870c-4c6f28f3ecb4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.072701] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181221MB free_disk=146GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 549.073671] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.078907] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.079535] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.079839] env[62383]: DEBUG 
oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for the task: (returnval){ [ 549.079839] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52bc425e-1407-3aaa-d86b-b25c48d21c74" [ 549.079839] env[62383]: _type = "Task" [ 549.079839] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.093059] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bc425e-1407-3aaa-d86b-b25c48d21c74, 'name': SearchDatastore_Task, 'duration_secs': 0.008914} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.093059] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.093059] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 549.093059] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.093562] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.093562] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 549.093562] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1836d71-f195-49b7-849c-56d2201913f2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.103598] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 
tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 549.103707] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 549.105635] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b736e6b-ef6d-4a5f-8a3b-611e80495b14 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.114711] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for the task: (returnval){ [ 549.114711] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520c0122-72b1-a635-b3f7-22981d668280" [ 549.114711] env[62383]: _type = "Task" [ 549.114711] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.124648] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520c0122-72b1-a635-b3f7-22981d668280, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.175744] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450936, 'name': CreateVM_Task, 'duration_secs': 0.364749} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.176036] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 549.177132] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.177344] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.177756] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 549.178073] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b0ef204-478d-470b-aba1-26d6ce80a0b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.185248] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for the task: (returnval){ [ 549.185248] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c1c911-0d82-7133-e4ee-1b1979c6efc0" [ 549.185248] env[62383]: _type = "Task" [ 549.185248] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.192343] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c1c911-0d82-7133-e4ee-1b1979c6efc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.355184] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.362255] env[62383]: INFO nova.compute.manager [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Took 16.55 seconds to build instance. 
[ 549.407162] env[62383]: DEBUG oslo_vmware.api [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2450937, 'name': PowerOnVM_Task, 'duration_secs': 0.505288} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.409996] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 549.410337] env[62383]: INFO nova.compute.manager [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Took 9.99 seconds to spawn the instance on the hypervisor. [ 549.410535] env[62383]: DEBUG nova.compute.manager [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 549.411762] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcbe3a5-f6c8-43c3-8c42-94dcb29491b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.479904] env[62383]: DEBUG nova.compute.utils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 549.480349] env[62383]: DEBUG nova.compute.manager [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 549.480698] env[62383]: DEBUG nova.network.neutron [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 549.564162] env[62383]: DEBUG nova.policy [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8500ba91ab5445d82406ff31a9ea721', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e20c8351a13a427db4fccbac7108c205', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 549.626171] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520c0122-72b1-a635-b3f7-22981d668280, 'name': SearchDatastore_Task, 'duration_secs': 0.009432} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.626655] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ba37ca9-7405-4f85-8814-ab40a3e07e58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.632158] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for the task: (returnval){ [ 549.632158] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e81698-568b-c804-79bb-28728d415235" [ 549.632158] env[62383]: _type = "Task" [ 549.632158] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.641259] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e81698-568b-c804-79bb-28728d415235, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.695880] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c1c911-0d82-7133-e4ee-1b1979c6efc0, 'name': SearchDatastore_Task, 'duration_secs': 0.008933} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.696240] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.696725] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 549.696987] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.864861] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c7b7e551-ce85-47ed-830b-8867c721fea3 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.062s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.937840] env[62383]: INFO nova.compute.manager [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Took 16.10 seconds to build instance. [ 549.984854] env[62383]: DEBUG nova.compute.manager [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 550.079328] env[62383]: DEBUG nova.network.neutron [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Successfully created port: b1d30299-f4ce-40b4-9046-fd1d10565fd3 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 550.146443] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e81698-568b-c804-79bb-28728d415235, 'name': SearchDatastore_Task, 'duration_secs': 0.009591} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.146695] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.146944] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ab338058-13c8-4df9-ba55-fabe1952557d/ab338058-13c8-4df9-ba55-fabe1952557d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 550.147314] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.147618] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 550.147844] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-971d31b8-75e0-4739-901d-e0d88042605e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.152363] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba34bd72-08d2-40aa-992e-732794c74216 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.160227] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for the task: (returnval){ [ 550.160227] env[62383]: value = "task-2450938" [ 550.160227] env[62383]: _type = "Task" [ 550.160227] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.163558] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 550.163793] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 550.164629] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a11acb54-2b4c-48e3-aa50-c783a9b41e75 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.172157] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450938, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.175493] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for the task: (returnval){ [ 550.175493] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5272ac3d-4f81-0a50-8e11-de305faebdd5" [ 550.175493] env[62383]: _type = "Task" [ 550.175493] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.184262] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5272ac3d-4f81-0a50-8e11-de305faebdd5, 'name': SearchDatastore_Task, 'duration_secs': 0.007989} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.185080] env[62383]: DEBUG nova.network.neutron [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Updated VIF entry in instance network info cache for port f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 550.185626] env[62383]: DEBUG nova.network.neutron [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Updating instance_info_cache with network_info: [{"id": "f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e", "address": "fa:16:3e:d1:3e:92", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fa0d52-c6", "ovs_interfaceid": "f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.191281] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0eabe0f-769d-45fb-9660-42de7e1d5dc5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.197768] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for the task: (returnval){ [ 550.197768] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525b0b9c-4659-363e-5058-71cf5eade952" [ 550.197768] env[62383]: _type = "Task" [ 550.197768] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.209759] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525b0b9c-4659-363e-5058-71cf5eade952, 'name': SearchDatastore_Task, 'duration_secs': 0.008955} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.213559] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.213559] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8a2b209c-423c-446c-a769-f7d7820d46da/8a2b209c-423c-446c-a769-f7d7820d46da.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 550.213559] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb3e7bf-3a5c-4ddc-84c3-1fa4a6a02d49 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.215199] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82dd3076-44dd-4000-a523-5b3c5d737c08 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.224982] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad62213-e4d9-4c72-b19a-8e4ecadc2573 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.228845] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for the task: (returnval){ [ 550.228845] env[62383]: value = "task-2450939" [ 550.228845] env[62383]: _type = "Task" [ 550.228845] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.267038] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb6ecf0-7405-43c5-bf9e-209ac4098183 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.273197] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450939, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.278520] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82242924-db41-46e4-810e-48b50db30845 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.294367] env[62383]: DEBUG nova.compute.provider_tree [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.367832] env[62383]: DEBUG nova.compute.manager [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 550.440566] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ad3212ed-e24a-473f-863c-71d21b74c954 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "8e911bad-5408-4588-9865-912ce4457d34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.612s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.558049] env[62383]: DEBUG nova.compute.manager [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Received event network-vif-plugged-e22f03a4-9d5c-4c58-ab19-bfd809d30f2b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 550.558285] env[62383]: DEBUG oslo_concurrency.lockutils [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] Acquiring lock "ab338058-13c8-4df9-ba55-fabe1952557d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.558536] env[62383]: DEBUG oslo_concurrency.lockutils [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] Lock "ab338058-13c8-4df9-ba55-fabe1952557d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.559083] env[62383]: DEBUG oslo_concurrency.lockutils [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] Lock "ab338058-13c8-4df9-ba55-fabe1952557d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.563556] env[62383]: DEBUG nova.compute.manager [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] No waiting events found dispatching network-vif-plugged-e22f03a4-9d5c-4c58-ab19-bfd809d30f2b {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 550.563556] 
env[62383]: WARNING nova.compute.manager [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Received unexpected event network-vif-plugged-e22f03a4-9d5c-4c58-ab19-bfd809d30f2b for instance with vm_state building and task_state spawning. [ 550.563556] env[62383]: DEBUG nova.compute.manager [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Received event network-changed-e22f03a4-9d5c-4c58-ab19-bfd809d30f2b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 550.563556] env[62383]: DEBUG nova.compute.manager [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Refreshing instance network info cache due to event network-changed-e22f03a4-9d5c-4c58-ab19-bfd809d30f2b. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 550.563556] env[62383]: DEBUG oslo_concurrency.lockutils [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] Acquiring lock "refresh_cache-ab338058-13c8-4df9-ba55-fabe1952557d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.563892] env[62383]: DEBUG oslo_concurrency.lockutils [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] Acquired lock "refresh_cache-ab338058-13c8-4df9-ba55-fabe1952557d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.563892] env[62383]: DEBUG nova.network.neutron [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Refreshing network info cache for port e22f03a4-9d5c-4c58-ab19-bfd809d30f2b {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 550.565544] env[62383]: DEBUG nova.network.neutron [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Successfully updated port: 5205d6ef-091d-4460-bd6c-3b1c5873c3ea {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 550.586516] env[62383]: DEBUG nova.network.neutron [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Successfully updated port: 1992c731-9b69-4b2d-8da4-293986dba848 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 550.681100] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450938, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471304} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.682055] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ab338058-13c8-4df9-ba55-fabe1952557d/ab338058-13c8-4df9-ba55-fabe1952557d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 550.683676] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 550.683676] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2849c8d1-7166-4681-a478-2ed2200020ef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.692560] env[62383]: DEBUG oslo_concurrency.lockutils [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] Releasing lock "refresh_cache-8e911bad-5408-4588-9865-912ce4457d34" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.693282] env[62383]: DEBUG nova.compute.manager [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Received event network-vif-plugged-7e2cc599-2a7f-4045-b958-4141268a4ab9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 550.693282] env[62383]: DEBUG oslo_concurrency.lockutils [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] Acquiring lock "8a2b209c-423c-446c-a769-f7d7820d46da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.693393] env[62383]: DEBUG oslo_concurrency.lockutils [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] Lock "8a2b209c-423c-446c-a769-f7d7820d46da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.693624] env[62383]: DEBUG oslo_concurrency.lockutils [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] Lock "8a2b209c-423c-446c-a769-f7d7820d46da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.693860] env[62383]: DEBUG nova.compute.manager [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] No waiting events found dispatching network-vif-plugged-7e2cc599-2a7f-4045-b958-4141268a4ab9 {{(pid=62383) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 550.694091] env[62383]: WARNING nova.compute.manager [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Received unexpected event network-vif-plugged-7e2cc599-2a7f-4045-b958-4141268a4ab9 for instance with vm_state building and task_state spawning. [ 550.694318] env[62383]: DEBUG nova.compute.manager [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Received event network-changed-7e2cc599-2a7f-4045-b958-4141268a4ab9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 550.694545] env[62383]: DEBUG nova.compute.manager [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Refreshing instance network info cache due to event network-changed-7e2cc599-2a7f-4045-b958-4141268a4ab9. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 550.695581] env[62383]: DEBUG oslo_concurrency.lockutils [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] Acquiring lock "refresh_cache-8a2b209c-423c-446c-a769-f7d7820d46da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.695581] env[62383]: DEBUG oslo_concurrency.lockutils [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] Acquired lock "refresh_cache-8a2b209c-423c-446c-a769-f7d7820d46da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.695581] env[62383]: DEBUG nova.network.neutron [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Refreshing network info cache for port 7e2cc599-2a7f-4045-b958-4141268a4ab9 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 550.698255] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for the task: (returnval){ [ 550.698255] env[62383]: value = "task-2450940" [ 550.698255] env[62383]: _type = "Task" [ 550.698255] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.710202] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450940, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.748147] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450939, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.769026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "8a165d96-f503-4bc5-bff4-e6a85201e137" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.769026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.800780] env[62383]: DEBUG nova.scheduler.client.report [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 550.899296] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.000801] env[62383]: DEBUG nova.compute.manager [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 551.038459] env[62383]: DEBUG nova.virt.hardware [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 551.038459] env[62383]: DEBUG nova.virt.hardware [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.038459] env[62383]: DEBUG nova.virt.hardware [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 551.039084] env[62383]: DEBUG nova.virt.hardware [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.039084] env[62383]: DEBUG nova.virt.hardware [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 551.039084] env[62383]: DEBUG nova.virt.hardware [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 551.039084] env[62383]: DEBUG nova.virt.hardware [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 551.039848] env[62383]: DEBUG nova.virt.hardware [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 551.039848] env[62383]: DEBUG nova.virt.hardware [None 
req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 551.040044] env[62383]: DEBUG nova.virt.hardware [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 551.040111] env[62383]: DEBUG nova.virt.hardware [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 551.044022] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a27341-1918-4ad9-8907-389d7332cfdc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.054983] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1ea0a1-cb85-4fa9-9983-473b7325434c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.067532] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.067685] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.067837] env[62383]: DEBUG nova.network.neutron [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 551.090577] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquiring lock "refresh_cache-9659a2dd-f1da-4a8e-a740-1ec01f96940c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.090577] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquired lock "refresh_cache-9659a2dd-f1da-4a8e-a740-1ec01f96940c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.090577] env[62383]: DEBUG nova.network.neutron [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 
tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 551.214265] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450940, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09042} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.214679] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 551.215447] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ca9a55-28a9-4cf3-be47-b16a3522a993 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.243549] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] ab338058-13c8-4df9-ba55-fabe1952557d/ab338058-13c8-4df9-ba55-fabe1952557d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 551.247233] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dcfb50e-91a6-406c-a544-0085dbb494b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.273153] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.747227} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.273153] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8a2b209c-423c-446c-a769-f7d7820d46da/8a2b209c-423c-446c-a769-f7d7820d46da.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 551.273153] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 551.273153] env[62383]: DEBUG nova.compute.manager [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 551.275811] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for the task: (returnval){ [ 551.275811] env[62383]: value = "task-2450941" [ 551.275811] env[62383]: _type = "Task" [ 551.275811] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.276096] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-304b84b1-84c0-416a-b58b-be89be0c35d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.287886] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450941, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.289883] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for the task: (returnval){ [ 551.289883] env[62383]: value = "task-2450942" [ 551.289883] env[62383]: _type = "Task" [ 551.289883] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.296642] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450942, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.308224] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.309627] env[62383]: DEBUG nova.compute.manager [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 551.313373] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.497s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.314941] env[62383]: INFO nova.compute.claims [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 551.615570] env[62383]: DEBUG nova.network.neutron [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Updated VIF entry in instance network info cache for port e22f03a4-9d5c-4c58-ab19-bfd809d30f2b. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 551.615844] env[62383]: DEBUG nova.network.neutron [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Updating instance_info_cache with network_info: [{"id": "e22f03a4-9d5c-4c58-ab19-bfd809d30f2b", "address": "fa:16:3e:aa:f7:bd", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape22f03a4-9d", "ovs_interfaceid": "e22f03a4-9d5c-4c58-ab19-bfd809d30f2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.665969] env[62383]: DEBUG nova.network.neutron [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.739660] env[62383]: DEBUG nova.network.neutron [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.807201] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450941, 'name': ReconfigVM_Task, 'duration_secs': 0.313911} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.808637] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Reconfigured VM instance instance-00000004 to attach disk [datastore2] ab338058-13c8-4df9-ba55-fabe1952557d/ab338058-13c8-4df9-ba55-fabe1952557d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 551.809659] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fd2983a-24bc-47e8-a5a0-5e7a8497df9a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.818203] env[62383]: DEBUG nova.compute.utils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 551.820682] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078356} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.820682] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.821397] env[62383]: DEBUG nova.compute.manager [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 551.821603] env[62383]: DEBUG nova.network.neutron [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 551.825368] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 551.829751] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdbb73b-63a7-4b5d-87aa-e69b75c57cfa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.832995] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for the task: (returnval){ [ 551.832995] env[62383]: value = "task-2450943" [ 551.832995] env[62383]: _type = "Task" [ 551.832995] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.856956] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 8a2b209c-423c-446c-a769-f7d7820d46da/8a2b209c-423c-446c-a769-f7d7820d46da.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 551.858663] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0914298-d665-4fc8-8938-a0e0a891801e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.880864] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450943, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.887289] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for the task: (returnval){ [ 551.887289] env[62383]: value = "task-2450944" [ 551.887289] env[62383]: _type = "Task" [ 551.887289] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.896405] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450944, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.128951] env[62383]: DEBUG nova.policy [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db7e9998210e485fa855f0375f63ad55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35016a724e7e4fa2b0fc19396d8e736b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 552.130654] env[62383]: DEBUG oslo_concurrency.lockutils [req-2ac050f7-7983-45e2-90a5-05536b79f826 req-2c7ddd44-dbba-4beb-99c7-5fb562aa0af9 service nova] Releasing lock "refresh_cache-ab338058-13c8-4df9-ba55-fabe1952557d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.237018] env[62383]: DEBUG nova.network.neutron [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Updating instance_info_cache with network_info: [{"id": "1992c731-9b69-4b2d-8da4-293986dba848", "address": "fa:16:3e:2b:4f:f1", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1992c731-9b", "ovs_interfaceid": "1992c731-9b69-4b2d-8da4-293986dba848", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.263827] env[62383]: DEBUG nova.network.neutron [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Updated VIF entry in instance network info cache for port 7e2cc599-2a7f-4045-b958-4141268a4ab9. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 552.264210] env[62383]: DEBUG nova.network.neutron [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Updating instance_info_cache with network_info: [{"id": "7e2cc599-2a7f-4045-b958-4141268a4ab9", "address": "fa:16:3e:3e:37:73", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e2cc599-2a", "ovs_interfaceid": "7e2cc599-2a7f-4045-b958-4141268a4ab9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.329114] env[62383]: DEBUG nova.compute.manager [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 552.346445] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450943, 'name': Rename_Task, 'duration_secs': 0.145266} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.346701] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 552.346939] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-681d0db7-6397-455c-8993-4fa7d5a2af20 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.353103] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for the task: (returnval){ [ 552.353103] env[62383]: value = "task-2450945" [ 552.353103] env[62383]: _type = "Task" [ 552.353103] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.366253] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450945, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.401400] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450944, 'name': ReconfigVM_Task, 'duration_secs': 0.301063} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.401400] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 8a2b209c-423c-446c-a769-f7d7820d46da/8a2b209c-423c-446c-a769-f7d7820d46da.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 552.402104] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46497aac-e595-4b48-96bb-bec8a816670c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.415289] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for the task: (returnval){ [ 552.415289] env[62383]: value = "task-2450946" [ 552.415289] env[62383]: _type = "Task" [ 552.415289] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.428995] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450946, 'name': Rename_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.487601] env[62383]: DEBUG nova.network.neutron [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance_info_cache with network_info: [{"id": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "address": "fa:16:3e:21:59:e3", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5205d6ef-09", "ovs_interfaceid": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.576018] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b09d990-5fef-41b2-aaeb-aeffe84ce742 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.587661] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff966b01-e6d3-40e5-a04a-bd30551c94f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.622485] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd8486f-247a-4b28-9208-29631605d663 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.631415] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9d1f99-473c-4583-bddc-f423e0b82304 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.649834] env[62383]: DEBUG nova.compute.provider_tree [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.714385] env[62383]: DEBUG nova.network.neutron [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Successfully updated port: b1d30299-f4ce-40b4-9046-fd1d10565fd3 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 552.741907] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Releasing lock "refresh_cache-9659a2dd-f1da-4a8e-a740-1ec01f96940c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.742267] env[62383]: DEBUG nova.compute.manager [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Instance network_info: |[{"id": "1992c731-9b69-4b2d-8da4-293986dba848", "address": "fa:16:3e:2b:4f:f1", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1992c731-9b", "ovs_interfaceid": "1992c731-9b69-4b2d-8da4-293986dba848", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 552.743072] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:4f:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1992c731-9b69-4b2d-8da4-293986dba848', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 552.754609] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Creating folder: Project (4ce3b0ac5656445fba697c05dcc53e70). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 552.755252] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb8bfe7a-1538-4eee-ac66-b08e03355a75 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.767546] env[62383]: DEBUG oslo_concurrency.lockutils [req-936bdcf6-a361-4ec3-a468-eb2f78be51f7 req-e8765743-70eb-4ad2-849d-0bd1a47c85a7 service nova] Releasing lock "refresh_cache-8a2b209c-423c-446c-a769-f7d7820d46da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.767989] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Created folder: Project (4ce3b0ac5656445fba697c05dcc53e70) in parent group-v496304. [ 552.768181] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Creating folder: Instances. Parent ref: group-v496317. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 552.768442] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d240a7e-aef4-4b5b-9805-0fe31bf6f832 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.777320] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Created folder: Instances in parent group-v496317. [ 552.777494] env[62383]: DEBUG oslo.service.loopingcall [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 552.777565] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 552.777778] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b957075-6b8c-4491-bae2-0b505e38cd20 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.802255] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 552.802255] env[62383]: value = "task-2450949" [ 552.802255] env[62383]: _type = "Task" [ 552.802255] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.810498] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450949, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.858918] env[62383]: DEBUG nova.network.neutron [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Successfully created port: 5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 552.870280] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450945, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.930503] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450946, 'name': Rename_Task, 'duration_secs': 0.173482} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.930976] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 552.931947] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0437ede1-f492-44b0-bada-043f60388ba3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.938244] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for the task: (returnval){ [ 552.938244] env[62383]: value = "task-2450950" [ 552.938244] env[62383]: _type = "Task" [ 552.938244] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.952938] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450950, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.992170] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.992657] env[62383]: DEBUG nova.compute.manager [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Instance network_info: |[{"id": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "address": "fa:16:3e:21:59:e3", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5205d6ef-09", "ovs_interfaceid": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 552.993208] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:59:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5205d6ef-091d-4460-bd6c-3b1c5873c3ea', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 553.003755] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Creating folder: Project (a8d1b45dd8d74bf9a01173d57990d06b). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 553.004110] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-30a0aa4f-e38a-4149-8d1b-206901b6d38d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.015160] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Created folder: Project (a8d1b45dd8d74bf9a01173d57990d06b) in parent group-v496304. 
[ 553.015382] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Creating folder: Instances. Parent ref: group-v496320. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 553.016152] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c14f5440-f668-4592-91d8-7d8ad27c1598 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.026987] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Created folder: Instances in parent group-v496320. [ 553.027260] env[62383]: DEBUG oslo.service.loopingcall [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.027680] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 553.027680] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c8077e4-80bc-4ffe-a4a4-269bbe639090 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.047922] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 553.047922] env[62383]: value = "task-2450953" [ 553.047922] env[62383]: _type = "Task" [ 553.047922] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.056892] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450953, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.153247] env[62383]: DEBUG nova.scheduler.client.report [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 553.213458] env[62383]: DEBUG nova.compute.manager [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Received event network-vif-plugged-5205d6ef-091d-4460-bd6c-3b1c5873c3ea {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 553.216084] env[62383]: DEBUG oslo_concurrency.lockutils [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] Acquiring lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.216084] env[62383]: DEBUG oslo_concurrency.lockutils [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.004s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.216084] env[62383]: DEBUG oslo_concurrency.lockutils [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.216204] env[62383]: DEBUG nova.compute.manager [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] No waiting events found dispatching network-vif-plugged-5205d6ef-091d-4460-bd6c-3b1c5873c3ea {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 553.216538] env[62383]: WARNING nova.compute.manager [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Received unexpected event network-vif-plugged-5205d6ef-091d-4460-bd6c-3b1c5873c3ea for instance with vm_state building and task_state spawning. 
[ 553.216686] env[62383]: DEBUG nova.compute.manager [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Received event network-changed-5205d6ef-091d-4460-bd6c-3b1c5873c3ea {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 553.216921] env[62383]: DEBUG nova.compute.manager [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Refreshing instance network info cache due to event network-changed-5205d6ef-091d-4460-bd6c-3b1c5873c3ea. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 553.217202] env[62383]: DEBUG oslo_concurrency.lockutils [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] Acquiring lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.217998] env[62383]: DEBUG oslo_concurrency.lockutils [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] Acquired lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.217998] env[62383]: DEBUG nova.network.neutron [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Refreshing network info cache for port 5205d6ef-091d-4460-bd6c-3b1c5873c3ea {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 553.220292] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "refresh_cache-1a740010-ddd0-4df6-8ae6-02f1ed50137f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.220541] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "refresh_cache-1a740010-ddd0-4df6-8ae6-02f1ed50137f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.221070] env[62383]: DEBUG nova.network.neutron [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 553.316591] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450949, 'name': CreateVM_Task, 'duration_secs': 0.5136} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.316754] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 553.317589] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.317762] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.318121] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 553.318484] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98125323-2248-4944-83a6-95e4e2c88724 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.323972] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for the task: (returnval){ [ 553.323972] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52286331-6b77-5943-ca1f-e681652ce4d6" [ 553.323972] env[62383]: _type = "Task" [ 553.323972] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.336128] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52286331-6b77-5943-ca1f-e681652ce4d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.340145] env[62383]: DEBUG nova.compute.manager [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 553.372508] env[62383]: DEBUG oslo_vmware.api [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450945, 'name': PowerOnVM_Task, 'duration_secs': 0.675793} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.374940] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 553.375728] env[62383]: INFO nova.compute.manager [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Took 9.47 seconds to spawn the instance on the hypervisor. [ 553.376132] env[62383]: DEBUG nova.compute.manager [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 553.377235] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37754358-a658-4528-9e52-402ee717df17 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.390301] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 553.390846] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.390846] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 553.390975] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.391084] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image pref 0:0:0 
{{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 553.391233] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 553.391454] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 553.391680] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 553.391866] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 553.392144] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 553.395087] env[62383]: DEBUG nova.virt.hardware [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 553.396725] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1cd454-97ca-463c-976f-f965bfc96c96 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.409923] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddd07d4-2b60-49c6-b161-4e5b8587d3d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.419337] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquiring lock "14bb9b79-d224-4a64-861e-30dd919c5741" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.419592] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lock "14bb9b79-d224-4a64-861e-30dd919c5741" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.449970] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450950, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.561023] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450953, 'name': CreateVM_Task, 'duration_secs': 0.340195} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.561023] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 553.561519] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.659631] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.660313] env[62383]: DEBUG nova.compute.manager [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 553.664141] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.591s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.803838] env[62383]: DEBUG nova.network.neutron [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.839642] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52286331-6b77-5943-ca1f-e681652ce4d6, 'name': SearchDatastore_Task, 'duration_secs': 0.010164} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.839974] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.840216] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 553.840454] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.840600] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.840778] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 553.843340] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.843650] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 553.843889] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b67b2829-9e18-4380-bcb6-70c29c40d972 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.845853] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5450ecd3-55c7-440f-b075-523b6c426fd9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.851697] env[62383]: DEBUG oslo_vmware.api [None 
req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 553.851697] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f96600-6d0e-6fc9-9392-67d4051d8a4a" [ 553.851697] env[62383]: _type = "Task" [ 553.851697] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.855428] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 553.855602] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 553.857055] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41e82b56-1f76-4616-9bc1-6d49fe9004f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.865744] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f96600-6d0e-6fc9-9392-67d4051d8a4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.868988] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for the task: (returnval){ [ 553.868988] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52565a75-abf1-6e89-7306-37301475a272" [ 553.868988] env[62383]: _type = "Task" [ 553.868988] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.879306] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52565a75-abf1-6e89-7306-37301475a272, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.882132] env[62383]: DEBUG nova.compute.manager [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Received event network-vif-plugged-1992c731-9b69-4b2d-8da4-293986dba848 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 553.882420] env[62383]: DEBUG oslo_concurrency.lockutils [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] Acquiring lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.882780] env[62383]: DEBUG oslo_concurrency.lockutils [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] Lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.883044] env[62383]: DEBUG oslo_concurrency.lockutils [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] Lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.883313] env[62383]: DEBUG nova.compute.manager [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] No waiting events found dispatching network-vif-plugged-1992c731-9b69-4b2d-8da4-293986dba848 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 553.883687] env[62383]: WARNING nova.compute.manager [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Received unexpected event network-vif-plugged-1992c731-9b69-4b2d-8da4-293986dba848 for instance with vm_state building and task_state spawning. [ 553.884133] env[62383]: DEBUG nova.compute.manager [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Received event network-changed-1992c731-9b69-4b2d-8da4-293986dba848 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 553.884440] env[62383]: DEBUG nova.compute.manager [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Refreshing instance network info cache due to event network-changed-1992c731-9b69-4b2d-8da4-293986dba848. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 553.884783] env[62383]: DEBUG oslo_concurrency.lockutils [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] Acquiring lock "refresh_cache-9659a2dd-f1da-4a8e-a740-1ec01f96940c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.884994] env[62383]: DEBUG oslo_concurrency.lockutils [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] Acquired lock "refresh_cache-9659a2dd-f1da-4a8e-a740-1ec01f96940c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.885229] env[62383]: DEBUG nova.network.neutron [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Refreshing network info cache for port 1992c731-9b69-4b2d-8da4-293986dba848 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 553.909882] env[62383]: INFO nova.compute.manager [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Took 19.08 seconds to build instance. [ 553.949110] env[62383]: DEBUG oslo_vmware.api [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450950, 'name': PowerOnVM_Task, 'duration_secs': 0.540507} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.949422] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 553.949796] env[62383]: INFO nova.compute.manager [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Took 12.29 seconds to spawn the instance on the hypervisor. [ 553.949796] env[62383]: DEBUG nova.compute.manager [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 553.950992] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df6202c6-8351-4399-936a-2a6d7f0ca3dd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.096798] env[62383]: DEBUG nova.network.neutron [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updated VIF entry in instance network info cache for port 5205d6ef-091d-4460-bd6c-3b1c5873c3ea. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 554.098032] env[62383]: DEBUG nova.network.neutron [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance_info_cache with network_info: [{"id": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "address": "fa:16:3e:21:59:e3", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5205d6ef-09", "ovs_interfaceid": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.170779] env[62383]: DEBUG nova.compute.utils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 554.177503] env[62383]: DEBUG nova.compute.manager [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 554.178229] env[62383]: DEBUG nova.network.neutron [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 554.313400] env[62383]: DEBUG nova.policy [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8500ba91ab5445d82406ff31a9ea721', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e20c8351a13a427db4fccbac7108c205', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 554.363954] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f96600-6d0e-6fc9-9392-67d4051d8a4a, 'name': SearchDatastore_Task, 'duration_secs': 0.014342} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.364262] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.364493] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 554.364702] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.382711] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52565a75-abf1-6e89-7306-37301475a272, 'name': SearchDatastore_Task, 'duration_secs': 0.021439} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.383614] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0480da9d-3367-49a7-a44e-b8d2953defed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.391627] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for the task: (returnval){ [ 554.391627] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527c1b5b-973c-3050-13be-62713ae0cfd5" [ 554.391627] env[62383]: _type = "Task" [ 554.391627] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.399895] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527c1b5b-973c-3050-13be-62713ae0cfd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.412583] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b7dbf169-b031-4269-9323-edab3c751f2a tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lock "ab338058-13c8-4df9-ba55-fabe1952557d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.589s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.479851] env[62383]: INFO nova.compute.manager [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Took 19.86 seconds to build instance. 
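The records in this stretch of the log lean on two building blocks again and again: per-instance serialization through oslo_concurrency.lockutils (the paired "acquired ... waited 0.000s" and "released ... held N.NNNs" messages) and asynchronous vCenter operations driven through oslo_vmware, where each task (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, PowerOnVM_Task) is polled until it reports "progress is 100%" and "completed successfully". A minimal Python sketch of that locking-plus-polling idiom follows; the connection details, the config spec, and the folder/resource-pool references are placeholders rather than values taken from this log, and the function only illustrates the pattern, not Nova's actual build path.

    from oslo_concurrency import lockutils
    from oslo_vmware import api as vmware_api

    # Placeholder connection details, assumed for illustration only.
    session = vmware_api.VMwareAPISession(
        host='vc.example.test',
        server_username='administrator@vsphere.local',
        server_password='secret',
        api_retry_count=3,
        task_poll_interval=0.5)

    def create_vm_locked(session, instance_uuid, config_spec, folder_ref, respool_ref):
        # Serialize work per instance UUID, mirroring the lock names the log
        # shows for _locked_do_build_and_run_instance.
        with lockutils.lock(instance_uuid):
            # CreateVM_Task is asynchronous on the vCenter side; invoke_api
            # issues the call through the session's vim client.
            task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                      config=config_spec, pool=respool_ref)
            # wait_for_task polls the task (the source of the "progress is N%"
            # records) and returns its TaskInfo once it completes successfully.
            task_info = session.wait_for_task(task)
            return task_info.result  # managed object reference of the new VM
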
[ 554.601716] env[62383]: DEBUG oslo_concurrency.lockutils [req-2e3568a6-02c8-4628-b669-33ec28f31f46 req-e8db274b-d509-4152-a158-aecbcf3d4b88 service nova] Releasing lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.630126] env[62383]: DEBUG nova.network.neutron [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Updating instance_info_cache with network_info: [{"id": "b1d30299-f4ce-40b4-9046-fd1d10565fd3", "address": "fa:16:3e:7c:cf:87", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d30299-f4", "ovs_interfaceid": "b1d30299-f4ce-40b4-9046-fd1d10565fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.679210] env[62383]: DEBUG nova.compute.manager [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 554.727689] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 7b8c8c12-fcf3-4b54-ae22-3aead1344803 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.727965] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8e911bad-5408-4588-9865-912ce4457d34 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.728213] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8a2b209c-423c-446c-a769-f7d7820d46da actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.728764] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance ab338058-13c8-4df9-ba55-fabe1952557d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.728764] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 9659a2dd-f1da-4a8e-a740-1ec01f96940c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.728936] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a8d56b8e-fa11-4844-ab65-a2e5d24b1e07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.729216] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1a740010-ddd0-4df6-8ae6-02f1ed50137f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.729890] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2c93bdf1-aaf4-4e40-898a-634dc00d05e6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.729890] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance da16da02-25ab-46f9-9070-9fdde0b3a75e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 554.807875] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Acquiring lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.808028] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.808663] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Acquiring lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.808663] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.808789] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.811932] env[62383]: INFO nova.compute.manager [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Terminating instance [ 554.906435] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527c1b5b-973c-3050-13be-62713ae0cfd5, 'name': SearchDatastore_Task, 'duration_secs': 0.0225} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.906435] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.906726] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9659a2dd-f1da-4a8e-a740-1ec01f96940c/9659a2dd-f1da-4a8e-a740-1ec01f96940c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 554.907129] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.910281] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 554.910281] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97101357-804e-4aba-9c86-b1017680d254 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.911280] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19b206e0-fe9b-458a-ab35-6d43e6bfaa81 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.919602] env[62383]: DEBUG nova.compute.manager [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 554.923492] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for the task: (returnval){ [ 554.923492] env[62383]: value = "task-2450954" [ 554.923492] env[62383]: _type = "Task" [ 554.923492] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.926883] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 554.926883] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 554.933107] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38823306-5920-499c-9e93-20bd41c50b52 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.939908] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.941890] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 554.941890] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5247b343-506c-23ce-8d98-7a1e8f18d82b" [ 554.941890] env[62383]: _type = "Task" [ 554.941890] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.953703] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5247b343-506c-23ce-8d98-7a1e8f18d82b, 'name': SearchDatastore_Task, 'duration_secs': 0.011494} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.953703] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6e468a9-7273-456e-b536-73d5cf51c018 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.960344] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 554.960344] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5270e742-158b-0a8f-d865-8adc211b4340" [ 554.960344] env[62383]: _type = "Task" [ 554.960344] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.967856] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5270e742-158b-0a8f-d865-8adc211b4340, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.984254] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a70a2a1f-8975-47bf-b1ab-1aac559474cf tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lock "8a2b209c-423c-446c-a769-f7d7820d46da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.379s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.135410] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "refresh_cache-1a740010-ddd0-4df6-8ae6-02f1ed50137f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.135939] env[62383]: DEBUG nova.compute.manager [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Instance network_info: |[{"id": "b1d30299-f4ce-40b4-9046-fd1d10565fd3", "address": "fa:16:3e:7c:cf:87", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d30299-f4", "ovs_interfaceid": "b1d30299-f4ce-40b4-9046-fd1d10565fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 555.136501] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:cf:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1d30299-f4ce-40b4-9046-fd1d10565fd3', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 555.145586] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Creating folder: Project (e20c8351a13a427db4fccbac7108c205). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 555.146103] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1adef7eb-e005-42d2-a6a4-fa65ed37e907 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.160865] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Created folder: Project (e20c8351a13a427db4fccbac7108c205) in parent group-v496304. [ 555.161153] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Creating folder: Instances. Parent ref: group-v496323. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 555.161487] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90823f92-bd92-4014-9232-3209299f10a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.173194] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Created folder: Instances in parent group-v496323. [ 555.173466] env[62383]: DEBUG oslo.service.loopingcall [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.173546] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 555.173822] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66dd2881-ab2a-4269-8715-7044e3046bec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.205531] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 555.205531] env[62383]: value = "task-2450957" [ 555.205531] env[62383]: _type = "Task" [ 555.205531] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.213694] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450957, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.238217] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 571a5250-8655-4f30-b193-919affbc1bd8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.317546] env[62383]: DEBUG nova.compute.manager [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 555.317840] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 555.319135] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc21d9fd-891e-4b00-9e39-4e0b1939680b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.329149] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 555.329480] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd11db2d-320b-436a-89d6-792eea997a8f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.337973] env[62383]: DEBUG oslo_vmware.api [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Waiting for the task: (returnval){ [ 555.337973] env[62383]: value = "task-2450958" [ 555.337973] env[62383]: _type = "Task" [ 555.337973] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.351710] env[62383]: DEBUG oslo_vmware.api [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Task: {'id': task-2450958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.408128] env[62383]: DEBUG nova.network.neutron [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Updated VIF entry in instance network info cache for port 1992c731-9b69-4b2d-8da4-293986dba848. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 555.408128] env[62383]: DEBUG nova.network.neutron [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Updating instance_info_cache with network_info: [{"id": "1992c731-9b69-4b2d-8da4-293986dba848", "address": "fa:16:3e:2b:4f:f1", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1992c731-9b", "ovs_interfaceid": "1992c731-9b69-4b2d-8da4-293986dba848", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.449988] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450954, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.473282] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.479324] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5270e742-158b-0a8f-d865-8adc211b4340, 'name': SearchDatastore_Task, 'duration_secs': 0.009959} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.479613] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.479872] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a8d56b8e-fa11-4844-ab65-a2e5d24b1e07/a8d56b8e-fa11-4844-ab65-a2e5d24b1e07.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 555.483354] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c68e409-1047-483e-9645-7d8546307e93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.487653] env[62383]: DEBUG nova.network.neutron [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Successfully updated port: 5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 555.495537] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.496667] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.500723] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 555.500723] env[62383]: value = "task-2450959" [ 555.500723] env[62383]: _type = "Task" [ 555.500723] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.514669] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450959, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.701736] env[62383]: DEBUG nova.compute.manager [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 555.711744] env[62383]: DEBUG nova.network.neutron [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Successfully created port: 2235952c-ebdd-41c6-9aa0-6353365f5ddf {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 555.719137] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450957, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.733644] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 555.733960] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 555.734197] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 555.734463] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 555.734665] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 555.734865] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 
tempest-ServersAdminTestJSON-1927862429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 555.735177] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 555.735408] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 555.735617] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 555.735838] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 555.736400] env[62383]: DEBUG nova.virt.hardware [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 555.737299] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fce1494-b0ba-45a1-8cd1-94698a1c54ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.741615] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a170fd95-3f7f-4315-a063-b9d02a7a1af4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.749842] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43741022-5d50-4fd8-90c6-bf745b5ae0d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.853363] env[62383]: DEBUG oslo_vmware.api [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Task: {'id': task-2450958, 'name': PowerOffVM_Task, 'duration_secs': 0.275529} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.853643] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 555.853804] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 555.854078] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-798db745-2bf9-46cb-a47e-9f3db1dc6951 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.912261] env[62383]: DEBUG oslo_concurrency.lockutils [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] Releasing lock "refresh_cache-9659a2dd-f1da-4a8e-a740-1ec01f96940c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.912534] env[62383]: DEBUG nova.compute.manager [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Received event network-vif-plugged-b1d30299-f4ce-40b4-9046-fd1d10565fd3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 555.914816] env[62383]: DEBUG oslo_concurrency.lockutils [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] Acquiring lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.914816] env[62383]: DEBUG oslo_concurrency.lockutils [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] Lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.914816] env[62383]: DEBUG oslo_concurrency.lockutils [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] Lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.914816] env[62383]: DEBUG nova.compute.manager [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] No waiting events found dispatching network-vif-plugged-b1d30299-f4ce-40b4-9046-fd1d10565fd3 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 555.914816] env[62383]: WARNING nova.compute.manager [req-422f20c5-4110-486d-9138-6eac8bf3bf5b req-775d6033-2d64-4e9f-9e17-4bdf4bf3308c service nova] [instance: 
1a740010-ddd0-4df6-8ae6-02f1ed50137f] Received unexpected event network-vif-plugged-b1d30299-f4ce-40b4-9046-fd1d10565fd3 for instance with vm_state building and task_state spawning. [ 555.924502] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 555.925748] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 555.925932] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Deleting the datastore file [datastore2] 7b8c8c12-fcf3-4b54-ae22-3aead1344803 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 555.926297] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd21c098-50c0-44db-888c-159f844fbfed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.939564] env[62383]: DEBUG oslo_vmware.api [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Waiting for the task: (returnval){ [ 555.939564] env[62383]: value = "task-2450961" [ 555.939564] env[62383]: _type = "Task" [ 555.939564] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.946891] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450954, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63216} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.948308] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9659a2dd-f1da-4a8e-a740-1ec01f96940c/9659a2dd-f1da-4a8e-a740-1ec01f96940c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 555.948308] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 555.948308] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58b39b0a-82ba-4f36-a5c8-ea3c342eb8ce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.959560] env[62383]: DEBUG oslo_vmware.api [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Task: {'id': task-2450961, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.963306] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for the task: (returnval){ [ 555.963306] env[62383]: value = "task-2450962" [ 555.963306] env[62383]: _type = "Task" [ 555.963306] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.973110] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450962, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.995911] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "refresh_cache-2c93bdf1-aaf4-4e40-898a-634dc00d05e6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.996073] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "refresh_cache-2c93bdf1-aaf4-4e40-898a-634dc00d05e6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.996380] env[62383]: DEBUG nova.network.neutron [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 556.000861] env[62383]: DEBUG nova.compute.manager [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 556.023139] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450959, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.217880] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450957, 'name': CreateVM_Task, 'duration_secs': 0.582181} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.217880] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 556.217880] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.217880] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.217880] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 556.217880] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0624fd47-827b-45d6-a3a8-f3ac53c40241 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.227080] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 556.227080] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520fed50-1e34-4db2-6cbe-d8c7111c7a16" [ 556.227080] env[62383]: _type = "Task" [ 556.227080] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.235482] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520fed50-1e34-4db2-6cbe-d8c7111c7a16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.246628] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8a165d96-f503-4bc5-bff4-e6a85201e137 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.451107] env[62383]: DEBUG oslo_vmware.api [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Task: {'id': task-2450961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.388981} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.451501] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 556.451702] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 556.452040] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 556.452528] env[62383]: INFO nova.compute.manager [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Took 1.13 seconds to destroy the instance on the hypervisor. [ 556.454854] env[62383]: DEBUG oslo.service.loopingcall [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.454925] env[62383]: DEBUG nova.compute.manager [-] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 556.455050] env[62383]: DEBUG nova.network.neutron [-] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 556.477142] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450962, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118229} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.477142] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 556.477142] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48eb944-92e7-4c94-875f-394d6ca3a485 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.502384] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 9659a2dd-f1da-4a8e-a740-1ec01f96940c/9659a2dd-f1da-4a8e-a740-1ec01f96940c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 556.504735] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e0229e6-85c5-4887-856a-06dd8461e165 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.530471] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for the task: (returnval){ [ 556.530471] env[62383]: value = "task-2450963" [ 556.530471] env[62383]: _type = "Task" [ 556.530471] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.534324] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450959, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678691} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.537622] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a8d56b8e-fa11-4844-ab65-a2e5d24b1e07/a8d56b8e-fa11-4844-ab65-a2e5d24b1e07.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 556.537977] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 556.538719] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee25db2a-589b-4c30-b4cd-5469a04e8197 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.545576] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.549706] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450963, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.551372] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 556.551372] env[62383]: value = "task-2450964" [ 556.551372] env[62383]: _type = "Task" [ 556.551372] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.560283] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.587146] env[62383]: DEBUG nova.network.neutron [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 556.742840] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520fed50-1e34-4db2-6cbe-d8c7111c7a16, 'name': SearchDatastore_Task, 'duration_secs': 0.011342} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.742911] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.743143] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 556.743396] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.743562] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.743751] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 556.744168] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90b1e36c-814e-4974-8613-33b5e9980a7a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.760038] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 14bb9b79-d224-4a64-861e-30dd919c5741 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.760038] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 556.760038] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 556.762508] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 556.762853] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 556.763722] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7ff7b70-4f9f-43dd-b343-d8765ef20081 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.771346] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 556.771346] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522bbf61-a8f2-c9d0-68f9-c25ff4e40628" [ 556.771346] env[62383]: _type = "Task" [ 556.771346] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.780134] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522bbf61-a8f2-c9d0-68f9-c25ff4e40628, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.838077] env[62383]: DEBUG nova.network.neutron [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Updating instance_info_cache with network_info: [{"id": "5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf", "address": "fa:16:3e:77:d2:90", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c1a084e-a9", "ovs_interfaceid": "5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.894730] env[62383]: DEBUG nova.compute.manager [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Received event network-changed-b1d30299-f4ce-40b4-9046-fd1d10565fd3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 556.894920] env[62383]: DEBUG nova.compute.manager [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Refreshing instance network info cache due to event network-changed-b1d30299-f4ce-40b4-9046-fd1d10565fd3. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 556.895179] env[62383]: DEBUG oslo_concurrency.lockutils [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] Acquiring lock "refresh_cache-1a740010-ddd0-4df6-8ae6-02f1ed50137f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.895326] env[62383]: DEBUG oslo_concurrency.lockutils [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] Acquired lock "refresh_cache-1a740010-ddd0-4df6-8ae6-02f1ed50137f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.895661] env[62383]: DEBUG nova.network.neutron [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Refreshing network info cache for port b1d30299-f4ce-40b4-9046-fd1d10565fd3 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 557.005044] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c4e773-d214-454f-abdd-f9535f1a1ac1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.016523] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5c818c-6ea2-4670-ba77-8c68bc081b1e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.070747] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06d6716-18cd-4dfa-b489-774c57a2ee8e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.079737] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12794} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.085012] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 557.085344] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450963, 'name': ReconfigVM_Task, 'duration_secs': 0.287894} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.086481] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960995a0-42e4-43bf-90a2-4777b5962e40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.089235] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 9659a2dd-f1da-4a8e-a740-1ec01f96940c/9659a2dd-f1da-4a8e-a740-1ec01f96940c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 557.094024] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-624af19c-76bc-4fbc-8504-6d2ecd5114b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.096980] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc910b3-9595-48e1-9174-3b78783bc30f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.123374] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] a8d56b8e-fa11-4844-ab65-a2e5d24b1e07/a8d56b8e-fa11-4844-ab65-a2e5d24b1e07.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 557.127692] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-654af880-aeb2-4285-a8c2-08c8a5453975 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.140154] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for the task: (returnval){ [ 557.140154] env[62383]: value = "task-2450965" [ 557.140154] env[62383]: _type = "Task" [ 557.140154] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.149456] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.156294] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 557.156294] env[62383]: value = "task-2450966" [ 557.156294] env[62383]: _type = "Task" [ 557.156294] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.159832] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450965, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.171977] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450966, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.282415] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522bbf61-a8f2-c9d0-68f9-c25ff4e40628, 'name': SearchDatastore_Task, 'duration_secs': 0.040593} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.284496] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1addd2df-5edb-49d9-a583-fbf57e9ddfbd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.290576] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 557.290576] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528629ee-7647-a923-dae1-141d9f485fd6" [ 557.290576] env[62383]: _type = "Task" [ 557.290576] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.300654] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528629ee-7647-a923-dae1-141d9f485fd6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.340603] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "refresh_cache-2c93bdf1-aaf4-4e40-898a-634dc00d05e6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.341173] env[62383]: DEBUG nova.compute.manager [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Instance network_info: |[{"id": "5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf", "address": "fa:16:3e:77:d2:90", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c1a084e-a9", "ovs_interfaceid": "5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 557.341366] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:d2:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 557.350018] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Creating folder: Project (35016a724e7e4fa2b0fc19396d8e736b). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 557.350323] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69b50056-a78c-42d0-88ee-e464623cdd9f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.362110] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Created folder: Project (35016a724e7e4fa2b0fc19396d8e736b) in parent group-v496304. 
[ 557.362346] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Creating folder: Instances. Parent ref: group-v496326. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 557.362672] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb042305-ce15-4905-ba8f-89473ff001da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.372298] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Created folder: Instances in parent group-v496326. [ 557.372622] env[62383]: DEBUG oslo.service.loopingcall [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 557.372912] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 557.373106] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f30d9010-dbad-49ed-b763-7e5547ec5d04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.394837] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 557.394837] env[62383]: value = "task-2450969" [ 557.394837] env[62383]: _type = "Task" [ 557.394837] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.404733] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450969, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.665030] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 557.680555] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450965, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.690681] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450966, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.810462] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528629ee-7647-a923-dae1-141d9f485fd6, 'name': SearchDatastore_Task, 'duration_secs': 0.024411} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.810462] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.810462] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 557.810462] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbf78833-d783-4ab1-9326-18af2d20f5b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.818176] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 557.818176] env[62383]: value = "task-2450970" [ 557.818176] env[62383]: _type = "Task" [ 557.818176] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.825231] env[62383]: DEBUG nova.network.neutron [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Updated VIF entry in instance network info cache for port b1d30299-f4ce-40b4-9046-fd1d10565fd3. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 557.825738] env[62383]: DEBUG nova.network.neutron [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Updating instance_info_cache with network_info: [{"id": "b1d30299-f4ce-40b4-9046-fd1d10565fd3", "address": "fa:16:3e:7c:cf:87", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1d30299-f4", "ovs_interfaceid": "b1d30299-f4ce-40b4-9046-fd1d10565fd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.832976] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.909278] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450969, 'name': CreateVM_Task, 'duration_secs': 0.403822} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.909278] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 557.909278] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.909278] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.909665] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 557.910091] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49b08f59-5c08-4333-83f3-baf3153a5f44 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.915095] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 557.915095] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a4e7f4-41e1-8783-d93f-ebfe85c3d766" [ 557.915095] env[62383]: _type = "Task" [ 557.915095] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.925871] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a4e7f4-41e1-8783-d93f-ebfe85c3d766, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.996797] env[62383]: DEBUG nova.compute.manager [None req-7b3cde0c-cc56-45ad-ba37-95bfa6c4f361 tempest-ServerDiagnosticsTest-203882770 tempest-ServerDiagnosticsTest-203882770-project-admin] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 557.999848] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b35d67d-fd02-4cbc-863d-c8d70e67cdc6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.007363] env[62383]: INFO nova.compute.manager [None req-7b3cde0c-cc56-45ad-ba37-95bfa6c4f361 tempest-ServerDiagnosticsTest-203882770 tempest-ServerDiagnosticsTest-203882770-project-admin] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Retrieving diagnostics [ 558.008343] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbde24de-8de4-4150-b1ff-01466a0b0da9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.110607] env[62383]: DEBUG nova.network.neutron [-] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.120174] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "67d41910-54e1-48f1-b0d3-f34a62595ef2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.120647] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "67d41910-54e1-48f1-b0d3-f34a62595ef2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.161309] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450965, 'name': Rename_Task, 'duration_secs': 0.788471} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.162116] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 558.162116] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b642ec1-eb90-47c9-92b5-17cff83d1b48 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.172130] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 558.172130] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.508s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.172401] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for the task: (returnval){ [ 558.172401] env[62383]: value = "task-2450971" [ 558.172401] env[62383]: _type = "Task" [ 558.172401] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.179977] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.825s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.183914] env[62383]: INFO nova.compute.claims [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 558.196619] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450971, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.199684] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450966, 'name': ReconfigVM_Task, 'duration_secs': 0.926376} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.200023] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Reconfigured VM instance instance-00000005 to attach disk [datastore2] a8d56b8e-fa11-4844-ab65-a2e5d24b1e07/a8d56b8e-fa11-4844-ab65-a2e5d24b1e07.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 558.201047] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d509a390-cb79-4761-908e-68293cb538df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.208869] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 558.208869] env[62383]: value = "task-2450972" [ 558.208869] env[62383]: _type = "Task" [ 558.208869] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.220811] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450972, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.330761] env[62383]: DEBUG oslo_concurrency.lockutils [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] Releasing lock "refresh_cache-1a740010-ddd0-4df6-8ae6-02f1ed50137f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.331428] env[62383]: DEBUG nova.compute.manager [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Received event network-vif-plugged-5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 558.332519] env[62383]: DEBUG oslo_concurrency.lockutils [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] Acquiring lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.332519] env[62383]: DEBUG oslo_concurrency.lockutils [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] Lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.332519] env[62383]: DEBUG oslo_concurrency.lockutils [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] Lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.333213] env[62383]: DEBUG nova.compute.manager [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] No waiting events found dispatching network-vif-plugged-5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 558.333213] env[62383]: WARNING nova.compute.manager [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Received unexpected event network-vif-plugged-5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf for instance with vm_state building and task_state spawning. [ 558.333607] env[62383]: DEBUG nova.compute.manager [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Received event network-changed-5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 558.333607] env[62383]: DEBUG nova.compute.manager [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Refreshing instance network info cache due to event network-changed-5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 558.333845] env[62383]: DEBUG oslo_concurrency.lockutils [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] Acquiring lock "refresh_cache-2c93bdf1-aaf4-4e40-898a-634dc00d05e6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.334236] env[62383]: DEBUG oslo_concurrency.lockutils [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] Acquired lock "refresh_cache-2c93bdf1-aaf4-4e40-898a-634dc00d05e6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.334236] env[62383]: DEBUG nova.network.neutron [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Refreshing network info cache for port 5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 558.335898] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450970, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.430104] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a4e7f4-41e1-8783-d93f-ebfe85c3d766, 'name': SearchDatastore_Task, 'duration_secs': 0.01602} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.430104] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.430104] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 558.430104] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.430376] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.430376] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 558.430376] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5da09daf-34b7-4299-bda3-1ea139aa7b25 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.442224] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 558.444019] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 558.444019] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1362e3ca-bebb-48bd-91e3-127df891ab32 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.447413] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquiring lock "ab338058-13c8-4df9-ba55-fabe1952557d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.447768] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lock "ab338058-13c8-4df9-ba55-fabe1952557d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.448116] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquiring lock "ab338058-13c8-4df9-ba55-fabe1952557d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.448851] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lock "ab338058-13c8-4df9-ba55-fabe1952557d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.449214] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lock "ab338058-13c8-4df9-ba55-fabe1952557d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.451683] env[62383]: INFO nova.compute.manager [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Terminating instance [ 558.454559] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 558.454559] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]523abc49-fbe3-73c0-958f-c3e04969add2" [ 558.454559] env[62383]: _type = "Task" [ 558.454559] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.464240] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523abc49-fbe3-73c0-958f-c3e04969add2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.615350] env[62383]: INFO nova.compute.manager [-] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Took 2.16 seconds to deallocate network for instance. [ 558.702369] env[62383]: DEBUG oslo_vmware.api [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450971, 'name': PowerOnVM_Task, 'duration_secs': 0.531138} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.702634] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 558.702876] env[62383]: INFO nova.compute.manager [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Took 10.09 seconds to spawn the instance on the hypervisor. [ 558.703309] env[62383]: DEBUG nova.compute.manager [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 558.704496] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b10e01-0ad6-4426-ac12-d71eef7874df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.724615] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450972, 'name': Rename_Task, 'duration_secs': 0.34582} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.724873] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 558.725213] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4f853e3-440c-4a0a-962d-98a4b2d00884 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.733472] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 558.733472] env[62383]: value = "task-2450973" [ 558.733472] env[62383]: _type = "Task" [ 558.733472] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.745160] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450973, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.831832] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450970, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582647} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.832554] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 558.833105] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 558.833548] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4bae92fb-0e4f-4239-b39e-815c85a852e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.847759] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 558.847759] env[62383]: value = "task-2450974" [ 558.847759] env[62383]: _type = "Task" [ 558.847759] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.863529] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450974, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.961659] env[62383]: DEBUG nova.compute.manager [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 558.961993] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 558.962908] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50c024c-5677-4a23-bf91-7393020d3ee7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.975993] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523abc49-fbe3-73c0-958f-c3e04969add2, 'name': SearchDatastore_Task, 'duration_secs': 0.033343} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.979484] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 558.979853] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab18c411-9a62-4fdb-afbb-4b84d6c4b1b3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.982394] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2faec51-82e0-4393-aea0-b579e7e19f3a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.988909] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 558.988909] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e16c80-575c-34a5-2758-a4a075a1b4aa" [ 558.988909] env[62383]: _type = "Task" [ 558.988909] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.991025] env[62383]: DEBUG oslo_vmware.api [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for the task: (returnval){ [ 558.991025] env[62383]: value = "task-2450975" [ 558.991025] env[62383]: _type = "Task" [ 558.991025] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.005033] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e16c80-575c-34a5-2758-a4a075a1b4aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.008490] env[62383]: DEBUG oslo_vmware.api [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450975, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.124730] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.245899] env[62383]: INFO nova.compute.manager [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Took 21.60 seconds to build instance. [ 559.253647] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450973, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.303717] env[62383]: DEBUG nova.network.neutron [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Successfully updated port: 2235952c-ebdd-41c6-9aa0-6353365f5ddf {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 559.359290] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450974, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075502} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.362182] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 559.364260] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d303ab4f-bc60-47f9-88e5-e09f1b773f5b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.387891] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 559.391341] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3e77e0f-6dbc-4afb-b3af-868d467714ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.412674] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 559.412674] env[62383]: value = "task-2450976" [ 559.412674] env[62383]: _type = "Task" [ 559.412674] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.421689] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450976, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.501084] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7422a691-c955-4ca8-8e47-be2e8508795e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.509910] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e16c80-575c-34a5-2758-a4a075a1b4aa, 'name': SearchDatastore_Task, 'duration_secs': 0.022569} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.511405] env[62383]: DEBUG oslo_vmware.api [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450975, 'name': PowerOffVM_Task, 'duration_secs': 0.30444} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.511405] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.511405] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 2c93bdf1-aaf4-4e40-898a-634dc00d05e6/2c93bdf1-aaf4-4e40-898a-634dc00d05e6.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 559.511405] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 559.511695] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 559.511695] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c61ddffb-b2ff-4157-9a91-eb97c0fc3c81 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.513676] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aaaea492-7334-48d9-ab98-098a157e8539 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.518681] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6ec13a-4c2a-4528-8012-3efdada86fe9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.525000] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 559.525000] env[62383]: value = "task-2450977" [ 559.525000] env[62383]: _type = "Task" [ 559.525000] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.556757] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10930fb1-0eb7-4619-8739-5a7c9bcb874e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.562363] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450977, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.567552] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db09baec-e02b-4e1d-b91a-703b46dd9a4d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.585838] env[62383]: DEBUG nova.compute.provider_tree [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.592463] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 559.592597] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 559.592883] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Deleting the datastore file [datastore2] ab338058-13c8-4df9-ba55-fabe1952557d {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 559.593557] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41b0a1e8-aa06-4b7e-aa63-401318aaed58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.601866] env[62383]: DEBUG oslo_vmware.api [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for the task: (returnval){ [ 559.601866] env[62383]: value = "task-2450979" [ 559.601866] env[62383]: _type = "Task" [ 559.601866] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.613234] env[62383]: DEBUG oslo_vmware.api [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450979, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.749961] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2e075ee-6fb7-478a-8d7b-7a6d9f991796 tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.124s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.749961] env[62383]: DEBUG oslo_vmware.api [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2450973, 'name': PowerOnVM_Task, 'duration_secs': 0.894517} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.750794] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 559.751267] env[62383]: INFO nova.compute.manager [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Took 13.49 seconds to spawn the instance on the hypervisor. [ 559.751506] env[62383]: DEBUG nova.compute.manager [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 559.752770] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfa5d8a-75fb-4d5b-b4c0-ffb441fca5b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.767952] env[62383]: DEBUG nova.network.neutron [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Updated VIF entry in instance network info cache for port 5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 559.767952] env[62383]: DEBUG nova.network.neutron [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Updating instance_info_cache with network_info: [{"id": "5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf", "address": "fa:16:3e:77:d2:90", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c1a084e-a9", "ovs_interfaceid": "5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.806943] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "refresh_cache-da16da02-25ab-46f9-9070-9fdde0b3a75e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.808415] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "refresh_cache-da16da02-25ab-46f9-9070-9fdde0b3a75e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.808415] env[62383]: DEBUG nova.network.neutron [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 559.934019] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450976, 'name': ReconfigVM_Task, 'duration_secs': 0.355749} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.934327] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 559.935079] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40546af1-ac26-42d6-9d77-a01456b51aa5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.949518] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 559.949518] env[62383]: value = "task-2450980" [ 559.949518] env[62383]: _type = "Task" [ 559.949518] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.958433] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450980, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.038637] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450977, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48129} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.038923] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 2c93bdf1-aaf4-4e40-898a-634dc00d05e6/2c93bdf1-aaf4-4e40-898a-634dc00d05e6.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 560.039156] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 560.039892] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bbd9a4c8-35fa-4c4c-bc15-f20b8bfe7ae9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.049168] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 560.049168] env[62383]: value = "task-2450981" [ 560.049168] env[62383]: _type = "Task" [ 560.049168] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.058444] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450981, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.094827] env[62383]: DEBUG nova.scheduler.client.report [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 560.116231] env[62383]: DEBUG oslo_vmware.api [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Task: {'id': task-2450979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.413309} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.117239] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 560.117410] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 560.117586] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 560.117839] env[62383]: INFO nova.compute.manager [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 560.117987] env[62383]: DEBUG oslo.service.loopingcall [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.119064] env[62383]: DEBUG nova.compute.manager [-] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 560.119064] env[62383]: DEBUG nova.network.neutron [-] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 560.256575] env[62383]: DEBUG nova.compute.manager [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 560.276990] env[62383]: DEBUG oslo_concurrency.lockutils [req-dde71828-6adc-4b41-afc8-f86a49828951 req-551a9ea5-2bda-4f1f-b95d-fe6f646e53bb service nova] Releasing lock "refresh_cache-2c93bdf1-aaf4-4e40-898a-634dc00d05e6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.279968] env[62383]: INFO nova.compute.manager [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Took 23.62 seconds to build instance. 
[ 560.396482] env[62383]: DEBUG nova.network.neutron [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 560.461578] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450980, 'name': Rename_Task, 'duration_secs': 0.173311} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.461842] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 560.462104] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3dc3b3e-e50f-46ec-810a-34108cac51d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.468570] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 560.468570] env[62383]: value = "task-2450982" [ 560.468570] env[62383]: _type = "Task" [ 560.468570] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.476962] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450982, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.563588] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078469} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.563937] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 560.564766] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c63e802-cad4-4045-aac8-24ab25ae286f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.597041] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 2c93bdf1-aaf4-4e40-898a-634dc00d05e6/2c93bdf1-aaf4-4e40-898a-634dc00d05e6.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 560.597190] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29351a45-066b-44e9-b738-86043a4091e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.615278] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.615278] env[62383]: DEBUG nova.compute.manager [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 560.616698] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.718s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.618988] env[62383]: INFO nova.compute.claims [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 560.626434] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 560.626434] env[62383]: value = "task-2450983" [ 560.626434] env[62383]: _type = "Task" [ 560.626434] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 560.635908] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450983, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 560.752829] env[62383]: DEBUG oslo_concurrency.lockutils [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquiring lock "8a2b209c-423c-446c-a769-f7d7820d46da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.752829] env[62383]: DEBUG oslo_concurrency.lockutils [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lock "8a2b209c-423c-446c-a769-f7d7820d46da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.752829] env[62383]: DEBUG oslo_concurrency.lockutils [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquiring lock "8a2b209c-423c-446c-a769-f7d7820d46da-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.752829] env[62383]: DEBUG oslo_concurrency.lockutils [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lock "8a2b209c-423c-446c-a769-f7d7820d46da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.753143] env[62383]: DEBUG oslo_concurrency.lockutils [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lock "8a2b209c-423c-446c-a769-f7d7820d46da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.755370] env[62383]: INFO nova.compute.manager [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Terminating instance [ 560.788358] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fced2650-6c80-4b42-aa51-fd2d02326360 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.141s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.798225] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.815801] env[62383]: DEBUG nova.compute.manager [req-710af39a-eca0-45fa-9bee-1e3eb2751a76 req-ee082eed-6a6e-4d9b-a6a6-5bece6e9ffdd service nova] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Received event network-vif-deleted-1c906cf8-6b21-4337-af7e-2bd00715405e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 560.815801] env[62383]: DEBUG nova.compute.manager [req-710af39a-eca0-45fa-9bee-1e3eb2751a76 req-ee082eed-6a6e-4d9b-a6a6-5bece6e9ffdd service nova] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Received event network-vif-plugged-2235952c-ebdd-41c6-9aa0-6353365f5ddf {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 560.815801] env[62383]: DEBUG oslo_concurrency.lockutils [req-710af39a-eca0-45fa-9bee-1e3eb2751a76 req-ee082eed-6a6e-4d9b-a6a6-5bece6e9ffdd service nova] Acquiring lock "da16da02-25ab-46f9-9070-9fdde0b3a75e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.815801] env[62383]: DEBUG oslo_concurrency.lockutils [req-710af39a-eca0-45fa-9bee-1e3eb2751a76 req-ee082eed-6a6e-4d9b-a6a6-5bece6e9ffdd service nova] Lock "da16da02-25ab-46f9-9070-9fdde0b3a75e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.815801] env[62383]: DEBUG oslo_concurrency.lockutils [req-710af39a-eca0-45fa-9bee-1e3eb2751a76 req-ee082eed-6a6e-4d9b-a6a6-5bece6e9ffdd service nova] Lock "da16da02-25ab-46f9-9070-9fdde0b3a75e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.816339] env[62383]: DEBUG nova.compute.manager [req-710af39a-eca0-45fa-9bee-1e3eb2751a76 req-ee082eed-6a6e-4d9b-a6a6-5bece6e9ffdd service nova] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] No waiting events found dispatching network-vif-plugged-2235952c-ebdd-41c6-9aa0-6353365f5ddf {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 560.819318] env[62383]: WARNING nova.compute.manager [req-710af39a-eca0-45fa-9bee-1e3eb2751a76 req-ee082eed-6a6e-4d9b-a6a6-5bece6e9ffdd service nova] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Received unexpected event network-vif-plugged-2235952c-ebdd-41c6-9aa0-6353365f5ddf for instance with vm_state building and task_state spawning. [ 560.984179] env[62383]: DEBUG oslo_vmware.api [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450982, 'name': PowerOnVM_Task, 'duration_secs': 0.495035} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.984179] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 560.984179] env[62383]: INFO nova.compute.manager [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Took 9.98 seconds to spawn the instance on the hypervisor. [ 560.984179] env[62383]: DEBUG nova.compute.manager [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 560.984851] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14cbdb2-e1af-4777-9416-a399e045c1c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.072063] env[62383]: DEBUG nova.network.neutron [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Updating instance_info_cache with network_info: [{"id": "2235952c-ebdd-41c6-9aa0-6353365f5ddf", "address": "fa:16:3e:dc:61:ad", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2235952c-eb", "ovs_interfaceid": "2235952c-ebdd-41c6-9aa0-6353365f5ddf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.073527] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquiring lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.073805] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 
tempest-ServerPasswordTestJSON-1517103748-project-member] Lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.129512] env[62383]: DEBUG nova.compute.utils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 561.131518] env[62383]: DEBUG nova.compute.manager [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 561.131751] env[62383]: DEBUG nova.network.neutron [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 561.147305] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450983, 'name': ReconfigVM_Task, 'duration_secs': 0.414819} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.147305] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 2c93bdf1-aaf4-4e40-898a-634dc00d05e6/2c93bdf1-aaf4-4e40-898a-634dc00d05e6.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 561.148409] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9694f99b-4e88-4d2a-8461-cd49459fa199 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.155549] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 561.155549] env[62383]: value = "task-2450984" [ 561.155549] env[62383]: _type = "Task" [ 561.155549] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.167430] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450984, 'name': Rename_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.260403] env[62383]: DEBUG nova.compute.manager [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 561.260684] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 561.264752] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35727544-87a3-44f5-9d0d-b8e771b80e03 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.274768] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 561.274768] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-066cd635-b126-45de-87d3-70f0dad8f995 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.284723] env[62383]: DEBUG oslo_vmware.api [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for the task: (returnval){ [ 561.284723] env[62383]: value = "task-2450985" [ 561.284723] env[62383]: _type = "Task" [ 561.284723] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.296146] env[62383]: DEBUG oslo_vmware.api [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450985, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.298974] env[62383]: DEBUG nova.policy [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fb0837588224498e840a5c23cff647b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d57565533a44cb2b6bbb1b626fc66f1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 561.333272] env[62383]: DEBUG nova.network.neutron [-] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.510330] env[62383]: INFO nova.compute.manager [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Took 20.41 seconds to build instance. [ 561.576217] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "refresh_cache-da16da02-25ab-46f9-9070-9fdde0b3a75e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.576884] env[62383]: DEBUG nova.compute.manager [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Instance network_info: |[{"id": "2235952c-ebdd-41c6-9aa0-6353365f5ddf", "address": "fa:16:3e:dc:61:ad", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2235952c-eb", "ovs_interfaceid": "2235952c-ebdd-41c6-9aa0-6353365f5ddf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 561.577497] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:61:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2235952c-ebdd-41c6-9aa0-6353365f5ddf', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 561.587733] env[62383]: DEBUG oslo.service.loopingcall [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 561.589268] env[62383]: DEBUG nova.compute.manager [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 561.592374] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 561.593701] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce7b2105-5b1f-484b-91cf-0c9a30d9e625 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.619738] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 561.619738] env[62383]: value = "task-2450986" [ 561.619738] env[62383]: _type = "Task" [ 561.619738] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.634638] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450986, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.637465] env[62383]: DEBUG nova.compute.manager [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 561.670570] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450984, 'name': Rename_Task, 'duration_secs': 0.214386} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.673153] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 561.673606] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-148f2287-5dc5-42a0-a50f-b0ee2be5b939 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.684455] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 561.684455] env[62383]: value = "task-2450987" [ 561.684455] env[62383]: _type = "Task" [ 561.684455] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.694697] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450987, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.759381] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "80821717-f961-49c7-8b79-c152edfdfb94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.759613] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.817756] env[62383]: DEBUG oslo_vmware.api [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450985, 'name': PowerOffVM_Task, 'duration_secs': 0.231429} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 561.818430] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 561.818669] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 561.818899] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24d6b77d-153a-495d-9f4e-db244938accb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.836306] env[62383]: INFO nova.compute.manager [-] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Took 1.72 seconds to deallocate network for instance. [ 561.886735] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 561.886952] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 561.887147] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Deleting the datastore file [datastore2] 8a2b209c-423c-446c-a769-f7d7820d46da {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 561.887472] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c729d080-7626-4686-919a-09c96428ceec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.896816] env[62383]: DEBUG oslo_vmware.api [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for the task: (returnval){ [ 561.896816] env[62383]: value = "task-2450989" [ 561.896816] env[62383]: _type = "Task" [ 561.896816] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.915831] env[62383]: DEBUG oslo_vmware.api [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450989, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.982233] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91982fea-2deb-4a3d-81d0-98fec92db078 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.994322] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629f9cb9-3743-4f41-9e25-30e846a07e65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.028918] env[62383]: DEBUG oslo_concurrency.lockutils [None req-21f56849-1cce-44f5-8fb5-bd10271b3729 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.936s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 562.031421] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e7af74-5060-4d03-8faa-e1e0966e3441 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.041633] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef29ae1-00f8-4949-8f8d-460cfc80fdc3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.060114] env[62383]: DEBUG nova.compute.provider_tree [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.119060] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.130108] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450986, 'name': CreateVM_Task, 'duration_secs': 0.443475} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.130322] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 562.131135] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.131306] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.132017] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 562.132017] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e89ed50-c7c2-4681-b68c-9485a815be65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.136773] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 562.136773] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d6e43c-7957-b1f1-523b-0fc5ce14f806" [ 562.136773] env[62383]: _type = "Task" [ 562.136773] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.149472] env[62383]: DEBUG nova.compute.manager [req-920e049e-2a02-410f-8ac1-9cec78862a31 req-279573a8-d36d-41a0-a526-af5036a902f4 service nova] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Received event network-vif-deleted-e22f03a4-9d5c-4c58-ab19-bfd809d30f2b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 562.154225] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d6e43c-7957-b1f1-523b-0fc5ce14f806, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.197494] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450987, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.296915] env[62383]: DEBUG nova.network.neutron [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Successfully created port: 28133d04-f592-4f43-9ade-58deef12e1f2 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 562.343303] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.408682] env[62383]: DEBUG oslo_vmware.api [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Task: {'id': task-2450989, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228129} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.408959] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 562.409182] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 562.409362] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 562.409554] env[62383]: INFO nova.compute.manager [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Took 1.15 seconds to destroy the instance on the hypervisor. [ 562.409796] env[62383]: DEBUG oslo.service.loopingcall [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 562.410009] env[62383]: DEBUG nova.compute.manager [-] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 562.410104] env[62383]: DEBUG nova.network.neutron [-] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 562.536479] env[62383]: DEBUG nova.compute.manager [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 562.564455] env[62383]: DEBUG nova.scheduler.client.report [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 562.628602] env[62383]: DEBUG nova.compute.manager [None req-bb2ccc66-465f-4a66-875f-767c6290b148 tempest-ServerExternalEventsTest-1331061514 tempest-ServerExternalEventsTest-1331061514-project] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Received event network-changed {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 562.628872] env[62383]: DEBUG nova.compute.manager [None req-bb2ccc66-465f-4a66-875f-767c6290b148 tempest-ServerExternalEventsTest-1331061514 tempest-ServerExternalEventsTest-1331061514-project] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Refreshing instance network info cache due to event network-changed. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 562.629483] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb2ccc66-465f-4a66-875f-767c6290b148 tempest-ServerExternalEventsTest-1331061514 tempest-ServerExternalEventsTest-1331061514-project] Acquiring lock "refresh_cache-9659a2dd-f1da-4a8e-a740-1ec01f96940c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.629597] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb2ccc66-465f-4a66-875f-767c6290b148 tempest-ServerExternalEventsTest-1331061514 tempest-ServerExternalEventsTest-1331061514-project] Acquired lock "refresh_cache-9659a2dd-f1da-4a8e-a740-1ec01f96940c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.630200] env[62383]: DEBUG nova.network.neutron [None req-bb2ccc66-465f-4a66-875f-767c6290b148 tempest-ServerExternalEventsTest-1331061514 tempest-ServerExternalEventsTest-1331061514-project] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 562.648390] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d6e43c-7957-b1f1-523b-0fc5ce14f806, 'name': SearchDatastore_Task, 'duration_secs': 0.011614} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.648748] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.648982] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 562.649225] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 562.649388] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 562.649616] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] 
Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 562.650152] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af23743e-b177-4338-b377-7b9ece37a157 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.654632] env[62383]: DEBUG nova.compute.manager [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 562.662013] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 562.662270] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 562.662932] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b095e262-71c0-469f-851f-68ca4c4d489b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.669338] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 562.669338] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c6fbc5-0815-15c7-e6b0-ef928705894d" [ 562.669338] env[62383]: _type = "Task" [ 562.669338] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.678772] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c6fbc5-0815-15c7-e6b0-ef928705894d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.691472] env[62383]: DEBUG nova.virt.hardware [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 562.691724] env[62383]: DEBUG nova.virt.hardware [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 562.691878] env[62383]: DEBUG nova.virt.hardware [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 562.692069] env[62383]: DEBUG nova.virt.hardware [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 562.692217] env[62383]: DEBUG nova.virt.hardware [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 562.692365] env[62383]: DEBUG nova.virt.hardware [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 562.692570] env[62383]: DEBUG nova.virt.hardware [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 562.692727] env[62383]: DEBUG nova.virt.hardware [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 562.692896] env[62383]: DEBUG nova.virt.hardware [None 
req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 562.693141] env[62383]: DEBUG nova.virt.hardware [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 562.693398] env[62383]: DEBUG nova.virt.hardware [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 562.694274] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e6fff3-05f5-4cb7-b8eb-1f1d483f6cb0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.703377] env[62383]: DEBUG oslo_vmware.api [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2450987, 'name': PowerOnVM_Task, 'duration_secs': 0.616113} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.706198] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 562.706399] env[62383]: INFO nova.compute.manager [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Took 9.37 seconds to spawn the instance on the hypervisor. 
[ 562.706574] env[62383]: DEBUG nova.compute.manager [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 562.707381] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5b9726-bf10-492a-a2e7-465071ca8b24 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.711385] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23eeb271-39da-42b0-84b2-be9c4126d55f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.067782] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.071014] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.071494] env[62383]: DEBUG nova.compute.manager [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 563.074804] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.254s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.082541] env[62383]: INFO nova.compute.claims [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.181137] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c6fbc5-0815-15c7-e6b0-ef928705894d, 'name': SearchDatastore_Task, 'duration_secs': 0.034414} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.182123] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1acfc122-0f8d-47d1-8d6f-698a062e6e5f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.188230] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 563.188230] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5290ff85-390b-b152-fe91-e59e5b284d54" [ 563.188230] env[62383]: _type = "Task" [ 563.188230] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.197500] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5290ff85-390b-b152-fe91-e59e5b284d54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.243757] env[62383]: INFO nova.compute.manager [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Took 18.64 seconds to build instance. [ 563.557690] env[62383]: DEBUG nova.network.neutron [None req-bb2ccc66-465f-4a66-875f-767c6290b148 tempest-ServerExternalEventsTest-1331061514 tempest-ServerExternalEventsTest-1331061514-project] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Updating instance_info_cache with network_info: [{"id": "1992c731-9b69-4b2d-8da4-293986dba848", "address": "fa:16:3e:2b:4f:f1", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1992c731-9b", "ovs_interfaceid": "1992c731-9b69-4b2d-8da4-293986dba848", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.585455] env[62383]: DEBUG nova.compute.utils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 563.589639] env[62383]: DEBUG nova.compute.manager [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 
tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 563.589853] env[62383]: DEBUG nova.network.neutron [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 563.595426] env[62383]: DEBUG nova.network.neutron [-] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.697025] env[62383]: DEBUG nova.policy [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bda8cb7b1005458ca6fc7e5ca6882e6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '603ba5501c904542b6ff0935f620e6da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 563.704612] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5290ff85-390b-b152-fe91-e59e5b284d54, 'name': SearchDatastore_Task, 'duration_secs': 0.012734} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 563.705275] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.705602] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] da16da02-25ab-46f9-9070-9fdde0b3a75e/da16da02-25ab-46f9-9070-9fdde0b3a75e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 563.706166] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c326a3b-35e3-464a-b1a6-119f4482f5ee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.714647] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 563.714647] env[62383]: value = "task-2450990" [ 563.714647] env[62383]: _type = "Task" [ 563.714647] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 563.723274] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450990, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 563.745496] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d388b14-8252-4637-a3c1-152a21e5fb3f tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.150s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.950620] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "69569fa0-5175-453e-9875-9ef46c723da8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.950890] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "69569fa0-5175-453e-9875-9ef46c723da8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.063369] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb2ccc66-465f-4a66-875f-767c6290b148 tempest-ServerExternalEventsTest-1331061514 tempest-ServerExternalEventsTest-1331061514-project] Releasing lock "refresh_cache-9659a2dd-f1da-4a8e-a740-1ec01f96940c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 564.071897] env[62383]: DEBUG nova.network.neutron [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Successfully updated port: 28133d04-f592-4f43-9ade-58deef12e1f2 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 564.099350] env[62383]: DEBUG nova.compute.manager [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 564.103976] env[62383]: INFO nova.compute.manager [-] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Took 1.69 seconds to deallocate network for instance. [ 564.229047] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450990, 'name': CopyVirtualDisk_Task} progress is 77%. 
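The 'Acquiring lock "compute_resources" ... / acquired ... waited 11.254s / "released" ... held 2.454s' lines come from oslo.concurrency's lock helpers serializing the resource tracker. A minimal sketch of that primitive (the decorator name and arguments here are illustrative; Nova's own wrapper differs slightly):

```python
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def instance_claim(instance_uuid, vcpus, memory_mb):
    # Only one thread at a time may update the host's usage totals;
    # everyone else blocks and later logs how long it "waited".
    print(f"claiming {vcpus} vCPU / {memory_mb} MiB for {instance_uuid}")


# The context-manager form covers ad-hoc critical sections:
with lockutils.lock("compute_resources"):
    pass
```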
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.254531] env[62383]: DEBUG nova.compute.manager [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Stashing vm_state: active {{(pid=62383) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 564.261920] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquiring lock "184d0caa-85c2-426d-82e5-ac52e525fe74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.263048] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lock "184d0caa-85c2-426d-82e5-ac52e525fe74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.456402] env[62383]: DEBUG nova.compute.manager [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 564.528819] env[62383]: DEBUG nova.network.neutron [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Successfully created port: d1a6d806-7723-4d98-843f-fe34d9a9f94c {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.556878] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed78285-fe58-4c56-87d8-331e97da75aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.571249] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83087ace-bef2-40da-8c44-6744d2b76be2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.578405] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquiring lock "refresh_cache-571a5250-8655-4f30-b193-919affbc1bd8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.578405] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquired lock "refresh_cache-571a5250-8655-4f30-b193-919affbc1bd8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.578405] env[62383]: DEBUG nova.network.neutron [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 
tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 564.621021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.622015] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330f1922-91dc-4a6c-80c8-2fba5f1138e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.631130] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cce2d0-8d97-4bef-868d-d0e093cdd5c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.646419] env[62383]: DEBUG nova.compute.provider_tree [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.728495] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450990, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570827} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 564.732370] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] da16da02-25ab-46f9-9070-9fdde0b3a75e/da16da02-25ab-46f9-9070-9fdde0b3a75e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 564.732370] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 564.733705] env[62383]: DEBUG nova.compute.manager [req-cb959885-c78f-4fd5-b931-ad6eb5fd40db req-99b60ed5-b322-49d2-a177-1237bf9528df service nova] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Received event network-changed-2235952c-ebdd-41c6-9aa0-6353365f5ddf {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 564.734009] env[62383]: DEBUG nova.compute.manager [req-cb959885-c78f-4fd5-b931-ad6eb5fd40db req-99b60ed5-b322-49d2-a177-1237bf9528df service nova] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Refreshing instance network info cache due to event network-changed-2235952c-ebdd-41c6-9aa0-6353365f5ddf. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 564.734355] env[62383]: DEBUG oslo_concurrency.lockutils [req-cb959885-c78f-4fd5-b931-ad6eb5fd40db req-99b60ed5-b322-49d2-a177-1237bf9528df service nova] Acquiring lock "refresh_cache-da16da02-25ab-46f9-9070-9fdde0b3a75e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.734424] env[62383]: DEBUG oslo_concurrency.lockutils [req-cb959885-c78f-4fd5-b931-ad6eb5fd40db req-99b60ed5-b322-49d2-a177-1237bf9528df service nova] Acquired lock "refresh_cache-da16da02-25ab-46f9-9070-9fdde0b3a75e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.734577] env[62383]: DEBUG nova.network.neutron [req-cb959885-c78f-4fd5-b931-ad6eb5fd40db req-99b60ed5-b322-49d2-a177-1237bf9528df service nova] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Refreshing network info cache for port 2235952c-ebdd-41c6-9aa0-6353365f5ddf {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 564.736252] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f426d232-b648-4686-aa4d-874d9104062d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.748875] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 564.748875] env[62383]: value = "task-2450991" [ 564.748875] env[62383]: _type = "Task" [ 564.748875] env[62383]: } to complete. 
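The 'Extending root virtual disk to 1048576' entry above is the m1.nano flavor's 1 GiB root disk expressed in KiB, the unit the ExtendVirtualDisk_Task call takes. A one-line check of that conversion:

```python
def root_disk_size_kb(root_gb: int) -> int:
    # ExtendVirtualDisk_Task expects the new capacity in KiB.
    return root_gb * 1024 * 1024


assert root_disk_size_kb(1) == 1048576   # matches the value in the log
```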
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 564.764010] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450991, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 564.778585] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.984047] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.122050] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquiring lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.122162] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.122337] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquiring lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.122553] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.123146] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.125632] env[62383]: DEBUG nova.compute.manager [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 565.128901] env[62383]: INFO nova.compute.manager [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Terminating instance [ 565.151604] env[62383]: DEBUG nova.scheduler.client.report [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 565.162723] env[62383]: DEBUG nova.network.neutron [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.170197] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 565.170197] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 565.170397] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 565.170644] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 565.171172] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 565.171172] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 565.171172] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 565.171395] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 565.171496] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 565.171674] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 565.171877] env[62383]: DEBUG nova.virt.hardware [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 565.173898] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28c0859-3d00-4391-82f7-375df7bcb8d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.184814] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f5be06-16f5-422a-9291-3ae50bd095ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.258789] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': 
task-2450991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.276768} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 565.259059] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 565.259879] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd11b066-8caf-466c-8ecc-b4af9aed78d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.290628] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] da16da02-25ab-46f9-9070-9fdde0b3a75e/da16da02-25ab-46f9-9070-9fdde0b3a75e.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 565.292573] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7afc07e0-201a-422c-8fe3-6ad9734ed8d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.318018] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 565.318018] env[62383]: value = "task-2450992" [ 565.318018] env[62383]: _type = "Task" [ 565.318018] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.326822] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450992, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.593474] env[62383]: DEBUG nova.compute.manager [req-56567c13-c74e-43ed-b4dc-b6feb4cf7394 req-9eb49ef0-c56d-4c93-845b-cc78a25e3dd2 service nova] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Received event network-vif-deleted-7e2cc599-2a7f-4045-b958-4141268a4ab9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 565.635407] env[62383]: DEBUG nova.compute.manager [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 565.635407] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 565.635407] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16286b2d-783e-4b63-a925-19cfea053e15 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.646377] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 565.651205] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b584f52-d596-4b83-b734-0ea0637a6f1b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.662155] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 565.662155] env[62383]: DEBUG nova.compute.manager [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 565.665052] env[62383]: DEBUG oslo_vmware.api [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for the task: (returnval){ [ 565.665052] env[62383]: value = "task-2450993" [ 565.665052] env[62383]: _type = "Task" [ 565.665052] env[62383]: } to complete. 
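The terminate path for instance 9659a2dd-f1da-4a8e-a740-1ec01f96940c above runs power-off, unregister, then datastore cleanup. A hypothetical outline of that ordering, with `session.call` / `session.wait_for_task` standing in for the real vmwareapi helpers (not Nova's actual signatures):

```python
def destroy_vm(session, vm_ref, instance_dir):
    """Sketch of the destroy sequence visible in the log."""
    task = session.call("PowerOffVM_Task", vm_ref)        # task-2450993 above
    session.wait_for_task(task)
    session.call("UnregisterVM", vm_ref)                  # synchronous, no task
    task = session.call("DeleteDatastoreFile_Task", instance_dir)
    session.wait_for_task(task)                           # task-2450995 above
```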
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.665855] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.194s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.667792] env[62383]: INFO nova.compute.claims [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 565.687292] env[62383]: DEBUG oslo_vmware.api [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450993, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.750278] env[62383]: DEBUG nova.network.neutron [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Updating instance_info_cache with network_info: [{"id": "28133d04-f592-4f43-9ade-58deef12e1f2", "address": "fa:16:3e:df:16:74", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28133d04-f5", "ovs_interfaceid": "28133d04-f592-4f43-9ade-58deef12e1f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.830082] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450992, 'name': ReconfigVM_Task} progress is 99%. 
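The 'Updating instance_info_cache with network_info' entries above carry the full VIF model as a list of port dicts. A small sketch of walking that structure to pull out the fixed IPs; the sample data mirrors the entry for port 28133d04-f592-4f43-9ade-58deef12e1f2:

```python
def fixed_ips(network_info):
    """Collect every fixed IP address from a network_info list."""
    return [
        ip["address"]
        for vif in network_info
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip["type"] == "fixed"
    ]


sample = [{
    "id": "28133d04-f592-4f43-9ade-58deef12e1f2",
    "network": {"subnets": [{"ips": [
        {"address": "192.168.233.122", "type": "fixed"},
    ]}]},
}]
assert fixed_ips(sample) == ["192.168.233.122"]
```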
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.928298] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquiring lock "0d992155-24fa-4836-83c9-8f188f7d7efa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.928828] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lock "0d992155-24fa-4836-83c9-8f188f7d7efa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.001648] env[62383]: DEBUG nova.network.neutron [req-cb959885-c78f-4fd5-b931-ad6eb5fd40db req-99b60ed5-b322-49d2-a177-1237bf9528df service nova] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Updated VIF entry in instance network info cache for port 2235952c-ebdd-41c6-9aa0-6353365f5ddf. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 566.002045] env[62383]: DEBUG nova.network.neutron [req-cb959885-c78f-4fd5-b931-ad6eb5fd40db req-99b60ed5-b322-49d2-a177-1237bf9528df service nova] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Updating instance_info_cache with network_info: [{"id": "2235952c-ebdd-41c6-9aa0-6353365f5ddf", "address": "fa:16:3e:dc:61:ad", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2235952c-eb", "ovs_interfaceid": "2235952c-ebdd-41c6-9aa0-6353365f5ddf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.168878] env[62383]: DEBUG nova.compute.utils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 566.171910] env[62383]: DEBUG nova.compute.manager [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Allocating IP information in the 
background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 566.172125] env[62383]: DEBUG nova.network.neutron [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 566.192269] env[62383]: DEBUG oslo_vmware.api [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450993, 'name': PowerOffVM_Task, 'duration_secs': 0.209915} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.193381] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 566.193381] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 566.193381] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d40bb65e-824d-42c0-b6b7-de20c19234d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.251073] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 566.251328] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 566.251479] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Deleting the datastore file [datastore2] 9659a2dd-f1da-4a8e-a740-1ec01f96940c {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 566.251738] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01a5716d-38de-4c1b-b2bd-8a2e18d17156 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.255284] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Releasing lock "refresh_cache-571a5250-8655-4f30-b193-919affbc1bd8" {{(pid=62383) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.255464] env[62383]: DEBUG nova.compute.manager [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Instance network_info: |[{"id": "28133d04-f592-4f43-9ade-58deef12e1f2", "address": "fa:16:3e:df:16:74", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28133d04-f5", "ovs_interfaceid": "28133d04-f592-4f43-9ade-58deef12e1f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 566.256671] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:16:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28133d04-f592-4f43-9ade-58deef12e1f2', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 566.264161] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Creating folder: Project (3d57565533a44cb2b6bbb1b626fc66f1). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 566.265139] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d49dbca-34ac-476b-a36c-cb0aeb6ced9c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.269869] env[62383]: DEBUG oslo_vmware.api [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for the task: (returnval){ [ 566.269869] env[62383]: value = "task-2450995" [ 566.269869] env[62383]: _type = "Task" [ 566.269869] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.277989] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Created folder: Project (3d57565533a44cb2b6bbb1b626fc66f1) in parent group-v496304. [ 566.278210] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Creating folder: Instances. Parent ref: group-v496330. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 566.281876] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95f0c4a1-a915-4a67-928c-021305016cfb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.282946] env[62383]: DEBUG oslo_vmware.api [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450995, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.289865] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Created folder: Instances in parent group-v496330. [ 566.290150] env[62383]: DEBUG oslo.service.loopingcall [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 566.290369] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 566.290500] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3646461-10cc-4bbb-b96c-79f666b1ce4e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.310239] env[62383]: DEBUG nova.policy [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cce49ef14f3a474c9448607425da3dc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2439f3d802f34027b12d50f242a54ba3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 566.316296] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 566.316296] env[62383]: value = "task-2450998" [ 566.316296] env[62383]: _type = "Task" [ 566.316296] env[62383]: } to complete. 
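The 'Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return' line comes from oslo.service's looping-call machinery. A minimal, self-contained use of that primitive; the polling body here is a toy stand-in, not the real create_vm check:

```python
from oslo_service import loopingcall

attempts = {"n": 0}


def _poll():
    # Toy check: after three polls, signal completion with a return value.
    attempts["n"] += 1
    if attempts["n"] >= 3:
        raise loopingcall.LoopingCallDone(retvalue="created")


timer = loopingcall.FixedIntervalLoopingCall(_poll)
result = timer.start(interval=0.5).wait()   # blocks until LoopingCallDone
```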
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.328479] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450998, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.331879] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450992, 'name': ReconfigVM_Task, 'duration_secs': 0.546172} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.332374] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Reconfigured VM instance instance-00000009 to attach disk [datastore2] da16da02-25ab-46f9-9070-9fdde0b3a75e/da16da02-25ab-46f9-9070-9fdde0b3a75e.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 566.333417] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c48b2c17-a722-4f97-b942-c69999470186 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.339883] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 566.339883] env[62383]: value = "task-2450999" [ 566.339883] env[62383]: _type = "Task" [ 566.339883] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.348413] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450999, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.505136] env[62383]: DEBUG oslo_concurrency.lockutils [req-cb959885-c78f-4fd5-b931-ad6eb5fd40db req-99b60ed5-b322-49d2-a177-1237bf9528df service nova] Releasing lock "refresh_cache-da16da02-25ab-46f9-9070-9fdde0b3a75e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.678483] env[62383]: DEBUG nova.compute.manager [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 566.778924] env[62383]: DEBUG oslo_vmware.api [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Task: {'id': task-2450995, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236409} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.779734] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 566.779734] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 566.779734] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 566.779928] env[62383]: INFO nova.compute.manager [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 566.779983] env[62383]: DEBUG oslo.service.loopingcall [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 566.780179] env[62383]: DEBUG nova.compute.manager [-] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 566.780275] env[62383]: DEBUG nova.network.neutron [-] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 566.824476] env[62383]: DEBUG nova.network.neutron [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Successfully updated port: d1a6d806-7723-4d98-843f-fe34d9a9f94c {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 566.834028] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2450998, 'name': CreateVM_Task, 'duration_secs': 0.468469} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.835231] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 566.835382] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.835543] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.835765] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 566.836123] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b872e0f-9ff3-4d54-8021-84c64e454b50 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.841780] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for the task: (returnval){ [ 566.841780] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52bdbca9-f769-e0d6-ef32-21307ef5d091" [ 566.841780] env[62383]: _type = "Task" [ 566.841780] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.860037] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bdbca9-f769-e0d6-ef32-21307ef5d091, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.860395] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2450999, 'name': Rename_Task, 'duration_secs': 0.178886} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.864885] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 566.865425] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a424933a-27ed-40db-a312-5cebb6e73185 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.872472] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 566.872472] env[62383]: value = "task-2451000" [ 566.872472] env[62383]: _type = "Task" [ 566.872472] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.882720] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451000, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.029768] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae2513c-b64a-4fc3-a5bd-def9bbaa6551 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.037237] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5471681-3f36-49ca-80a1-d965ab35efce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.072651] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5273bd59-b4eb-4153-84cb-50d9f62fb2a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.080574] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61dc00d4-e9d7-4747-83b8-d63ae4df4775 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.094323] env[62383]: DEBUG nova.compute.provider_tree [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 567.277030] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d4d19eca-d4ab-48d7-a88e-d1b6c6391292 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "905304d9-0288-442a-a024-625dc212e5b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.277270] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-d4d19eca-d4ab-48d7-a88e-d1b6c6391292 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "905304d9-0288-442a-a024-625dc212e5b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.338621] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-a170fd95-3f7f-4315-a063-b9d02a7a1af4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.338621] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-a170fd95-3f7f-4315-a063-b9d02a7a1af4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.338621] env[62383]: DEBUG nova.network.neutron [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 567.359944] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bdbca9-f769-e0d6-ef32-21307ef5d091, 'name': SearchDatastore_Task, 'duration_secs': 0.032468} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.360320] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.360579] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 567.360815] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.360960] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.361181] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 567.361477] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b399266-0907-4982-888b-da4851a2b633 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.379834] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 567.379834] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Folder [datastore2] devstack-image-cache_base created. 
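The Acquiring/Acquired/Releasing lock records around the image-cache paths come from oslo.concurrency: the cached image and its .vmdk are guarded by named locks so that only one request at a time fetches or reuses a given cache entry. A minimal sketch of that locking pattern (the lock name and functions are illustrative only):

    from oslo_concurrency import lockutils

    CACHE_LOCK = '[datastore2] devstack-image-cache_base/<image-id>'  # example name

    def use_cached_image():
        # lockutils emits acquire/release debug lines like those above
        # when the lock is entered and exited.
        with lockutils.lock(CACHE_LOCK):
            pass  # fetch the image into the cache, or reuse it

    # Equivalent decorator form: every call on the same name is serialized.
    @lockutils.synchronized(CACHE_LOCK)
    def use_cached_image_sync():
        pass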
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 567.380685] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-040fcba5-97f3-4ce2-967b-be0f50c2e45f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.387883] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451000, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.390991] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for the task: (returnval){ [ 567.390991] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5268c7cc-0ab2-a898-8835-0f3c9416b8cf" [ 567.390991] env[62383]: _type = "Task" [ 567.390991] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.401015] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5268c7cc-0ab2-a898-8835-0f3c9416b8cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.404157] env[62383]: DEBUG nova.network.neutron [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Successfully created port: 925071ab-96dd-4c80-901e-9dba6c4a5a9c {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 567.601036] env[62383]: DEBUG nova.scheduler.client.report [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 567.688854] env[62383]: DEBUG nova.compute.manager [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Start spawning the instance on the hypervisor. 
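The "Inventory has not changed" report compares the node's resource-provider inventory with what Placement already has. The capacity Placement can allocate from each inventory record is (total - reserved) * allocation_ratio; checking that against the figures logged above (the arithmetic is mine, not additional log data):

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0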
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 567.729777] env[62383]: DEBUG nova.virt.hardware [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 567.731025] env[62383]: DEBUG nova.virt.hardware [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.731255] env[62383]: DEBUG nova.virt.hardware [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 567.731452] env[62383]: DEBUG nova.virt.hardware [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.731596] env[62383]: DEBUG nova.virt.hardware [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 567.731742] env[62383]: DEBUG nova.virt.hardware [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 567.731947] env[62383]: DEBUG nova.virt.hardware [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 567.732114] env[62383]: DEBUG nova.virt.hardware [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 567.732275] env[62383]: DEBUG nova.virt.hardware [None 
req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 567.732441] env[62383]: DEBUG nova.virt.hardware [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 567.732608] env[62383]: DEBUG nova.virt.hardware [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 567.734081] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f245fd65-7edb-47fc-98bc-439328083ca3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.742565] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671bb8d7-d0c0-4157-83d6-05aaef3b496b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.882429] env[62383]: DEBUG oslo_vmware.api [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451000, 'name': PowerOnVM_Task, 'duration_secs': 0.696468} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.882698] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 567.882918] env[62383]: INFO nova.compute.manager [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Took 12.18 seconds to spawn the instance on the hypervisor. [ 567.883043] env[62383]: DEBUG nova.compute.manager [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 567.883798] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f2be7e-2072-4266-be9c-11c5e2279633 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.894022] env[62383]: DEBUG nova.network.neutron [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Instance cache missing network info. 
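The nova.virt.hardware walk above takes flavor and image limits of 0 (unset), falls back to the 65536 maximums, and ends with a single possible topology of 1 socket x 1 core x 1 thread for the one-vCPU m1.nano flavor. Loosely, the enumeration amounts to listing the factorizations of the vCPU count that fit within the limits; a small illustration (not Nova's actual implementation):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Return (sockets, cores, threads) triples whose product equals vcpus."""
        divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
        topologies = []
        for sockets, cores in product(divisors, divisors):
            if vcpus % (sockets * cores):
                continue
            threads = vcpus // (sockets * cores)
            if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
                topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- matches the log above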
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.905380] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5268c7cc-0ab2-a898-8835-0f3c9416b8cf, 'name': SearchDatastore_Task, 'duration_secs': 0.021438} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.906209] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20ed6b64-3f59-4481-9e86-ee3671977627 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.912489] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for the task: (returnval){ [ 567.912489] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ccc6ca-2566-f146-9168-dc8507e436aa" [ 567.912489] env[62383]: _type = "Task" [ 567.912489] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.920903] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ccc6ca-2566-f146-9168-dc8507e436aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.109264] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.109856] env[62383]: DEBUG nova.compute.manager [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 568.113402] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.570s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.117124] env[62383]: INFO nova.compute.claims [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 568.133593] env[62383]: DEBUG nova.network.neutron [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Updating instance_info_cache with network_info: [{"id": "d1a6d806-7723-4d98-843f-fe34d9a9f94c", "address": "fa:16:3e:89:85:39", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1a6d806-77", "ovs_interfaceid": "d1a6d806-7723-4d98-843f-fe34d9a9f94c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.181713] env[62383]: DEBUG nova.network.neutron [-] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.292268] env[62383]: DEBUG nova.compute.manager [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Received event network-vif-plugged-28133d04-f592-4f43-9ade-58deef12e1f2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 568.292412] env[62383]: DEBUG oslo_concurrency.lockutils [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] Acquiring lock "571a5250-8655-4f30-b193-919affbc1bd8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.292695] env[62383]: DEBUG oslo_concurrency.lockutils [req-9474b059-0690-4049-a9c3-68da9edf7c41 
req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] Lock "571a5250-8655-4f30-b193-919affbc1bd8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.292893] env[62383]: DEBUG oslo_concurrency.lockutils [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] Lock "571a5250-8655-4f30-b193-919affbc1bd8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.296126] env[62383]: DEBUG nova.compute.manager [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] No waiting events found dispatching network-vif-plugged-28133d04-f592-4f43-9ade-58deef12e1f2 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 568.296126] env[62383]: WARNING nova.compute.manager [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Received unexpected event network-vif-plugged-28133d04-f592-4f43-9ade-58deef12e1f2 for instance with vm_state building and task_state spawning. [ 568.296126] env[62383]: DEBUG nova.compute.manager [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Received event network-changed-28133d04-f592-4f43-9ade-58deef12e1f2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 568.296126] env[62383]: DEBUG nova.compute.manager [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Refreshing instance network info cache due to event network-changed-28133d04-f592-4f43-9ade-58deef12e1f2. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 568.296126] env[62383]: DEBUG oslo_concurrency.lockutils [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] Acquiring lock "refresh_cache-571a5250-8655-4f30-b193-919affbc1bd8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.296389] env[62383]: DEBUG oslo_concurrency.lockutils [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] Acquired lock "refresh_cache-571a5250-8655-4f30-b193-919affbc1bd8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.296389] env[62383]: DEBUG nova.network.neutron [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Refreshing network info cache for port 28133d04-f592-4f43-9ade-58deef12e1f2 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 568.408311] env[62383]: INFO nova.compute.manager [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Took 23.61 seconds to build instance. 
[ 568.427031] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ccc6ca-2566-f146-9168-dc8507e436aa, 'name': SearchDatastore_Task, 'duration_secs': 0.023566} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 568.427323] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.427577] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 571a5250-8655-4f30-b193-919affbc1bd8/571a5250-8655-4f30-b193-919affbc1bd8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 568.427844] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de22e9bf-530c-4ff7-baa2-ab57f2ce412f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.440054] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for the task: (returnval){ [ 568.440054] env[62383]: value = "task-2451001" [ 568.440054] env[62383]: _type = "Task" [ 568.440054] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.449694] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.624015] env[62383]: DEBUG nova.compute.utils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 568.626230] env[62383]: DEBUG nova.compute.manager [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 568.626473] env[62383]: DEBUG nova.network.neutron [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 568.635085] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-a170fd95-3f7f-4315-a063-b9d02a7a1af4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.635085] env[62383]: DEBUG nova.compute.manager [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Instance network_info: |[{"id": "d1a6d806-7723-4d98-843f-fe34d9a9f94c", "address": "fa:16:3e:89:85:39", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1a6d806-77", "ovs_interfaceid": "d1a6d806-7723-4d98-843f-fe34d9a9f94c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 568.635300] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:85:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb971244-43ba-41b4-a6a2-a4558548012c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1a6d806-7723-4d98-843f-fe34d9a9f94c', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 568.645374] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Creating folder: Project (603ba5501c904542b6ff0935f620e6da). Parent ref: group-v496304. 
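The "Instance VIF info" entry is derived from the network_info blob cached for the instance: the port id, MAC address, and NSX logical-switch id are remapped into the structure the VMware driver uses to wire a vmxnet3 NIC onto an opaque network. A rough sketch of that mapping, reusing values from the log (the helper name is made up for illustration):

    def vif_info_from_network_info(vif, vif_model='vmxnet3'):
        # vif is one entry of the instance's network_info list.
        return {
            'network_name': vif['network']['bridge'],
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': vif_model,
        }

    vif = {
        'id': 'd1a6d806-7723-4d98-843f-fe34d9a9f94c',
        'address': 'fa:16:3e:89:85:39',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id': 'cb971244-43ba-41b4-a6a2-a4558548012c'},
    }
    print(vif_info_from_network_info(vif))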
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 568.647391] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f899b97b-5631-41ce-86b8-2f6f3023b191 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.659267] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Created folder: Project (603ba5501c904542b6ff0935f620e6da) in parent group-v496304. [ 568.659396] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Creating folder: Instances. Parent ref: group-v496333. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 568.659676] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a24787ed-bc98-4410-b802-ceb50917f25d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.670819] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Created folder: Instances in parent group-v496333. [ 568.671094] env[62383]: DEBUG oslo.service.loopingcall [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.671307] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 568.671559] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8625735-c2fb-47f5-b865-4197ffe2214b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.687849] env[62383]: DEBUG nova.policy [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e46b11500414feab2651f42e88cc7cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9e3f9cc0f33c4f6b9bee57e176d9048e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 568.691719] env[62383]: INFO nova.compute.manager [-] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Took 1.91 seconds to deallocate network for instance. [ 568.699251] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 568.699251] env[62383]: value = "task-2451004" [ 568.699251] env[62383]: _type = "Task" [ 568.699251] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.712434] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451004, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.832523] env[62383]: DEBUG nova.compute.manager [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Received event network-vif-plugged-d1a6d806-7723-4d98-843f-fe34d9a9f94c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 568.832785] env[62383]: DEBUG oslo_concurrency.lockutils [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] Acquiring lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.833016] env[62383]: DEBUG oslo_concurrency.lockutils [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] Lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.833324] env[62383]: DEBUG oslo_concurrency.lockutils [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] Lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.833505] env[62383]: DEBUG nova.compute.manager [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] No waiting events found dispatching network-vif-plugged-d1a6d806-7723-4d98-843f-fe34d9a9f94c {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 568.833666] env[62383]: WARNING nova.compute.manager [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Received unexpected event network-vif-plugged-d1a6d806-7723-4d98-843f-fe34d9a9f94c for instance with vm_state building and task_state spawning. [ 568.833820] env[62383]: DEBUG nova.compute.manager [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Received event network-changed-d1a6d806-7723-4d98-843f-fe34d9a9f94c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 568.833970] env[62383]: DEBUG nova.compute.manager [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Refreshing instance network info cache due to event network-changed-d1a6d806-7723-4d98-843f-fe34d9a9f94c. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 568.834173] env[62383]: DEBUG oslo_concurrency.lockutils [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] Acquiring lock "refresh_cache-a170fd95-3f7f-4315-a063-b9d02a7a1af4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.834305] env[62383]: DEBUG oslo_concurrency.lockutils [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] Acquired lock "refresh_cache-a170fd95-3f7f-4315-a063-b9d02a7a1af4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.834467] env[62383]: DEBUG nova.network.neutron [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Refreshing network info cache for port d1a6d806-7723-4d98-843f-fe34d9a9f94c {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 568.911024] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7281767b-608b-4157-a5ef-24be64515387 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "da16da02-25ab-46f9-9070-9fdde0b3a75e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.124s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.951232] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451001, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.956397] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquiring lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.956397] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.134857] env[62383]: DEBUG nova.compute.manager [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 569.150171] env[62383]: DEBUG nova.network.neutron [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Successfully created port: 940c815a-1c07-492f-8b17-e4a57d123790 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 569.198950] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.217018] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451004, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.324367] env[62383]: DEBUG nova.network.neutron [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Updated VIF entry in instance network info cache for port 28133d04-f592-4f43-9ade-58deef12e1f2. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 569.324725] env[62383]: DEBUG nova.network.neutron [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Updating instance_info_cache with network_info: [{"id": "28133d04-f592-4f43-9ade-58deef12e1f2", "address": "fa:16:3e:df:16:74", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.122", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28133d04-f5", "ovs_interfaceid": "28133d04-f592-4f43-9ade-58deef12e1f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.419041] env[62383]: DEBUG nova.compute.manager [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 569.454583] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451001, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587859} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.455240] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 571a5250-8655-4f30-b193-919affbc1bd8/571a5250-8655-4f30-b193-919affbc1bd8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 569.458023] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 569.458023] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05004d44-f690-4665-bed1-76908ea699a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.462987] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for the task: (returnval){ [ 569.462987] env[62383]: value = "task-2451005" [ 569.462987] env[62383]: _type = "Task" [ 569.462987] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.481901] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451005, 'name': ExtendVirtualDisk_Task} progress is 0%. 
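The "Extending root virtual disk to 1048576" step grows the copied image to the flavor's root disk size; the extend task takes its new capacity in KB, and root_gb=1 on m1.nano works out to exactly the logged figure:

    root_gb = 1                               # m1.nano root disk size, per the flavor above
    new_capacity_kb = root_gb * 1024 * 1024   # GiB expressed in KiB
    print(new_capacity_kb)                    # 1048576, matching the extend request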
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.557074] env[62383]: DEBUG nova.network.neutron [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Successfully updated port: 925071ab-96dd-4c80-901e-9dba6c4a5a9c {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 569.570693] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50e2c25-9d3a-4b3d-b1ff-6ba5ce76ce53 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.580902] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d88c6b-3b5c-497b-98f4-7517b0fb72bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.619177] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5b7c1b-4455-46f4-94cf-00df14f45cb8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.630059] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20bd9ad-4a7d-4056-be7a-d2dad2def6b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.652288] env[62383]: DEBUG nova.compute.provider_tree [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.711053] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451004, 'name': CreateVM_Task, 'duration_secs': 0.627939} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.711386] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 569.711945] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.712124] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.712445] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 569.712710] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63ccb6f2-5cda-439a-961b-502ddc5709f0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.720616] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 569.720616] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52196ef3-495f-a3b9-ec00-a936b53964cd" [ 569.720616] env[62383]: _type = "Task" [ 569.720616] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.730219] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52196ef3-495f-a3b9-ec00-a936b53964cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.831124] env[62383]: DEBUG oslo_concurrency.lockutils [req-9474b059-0690-4049-a9c3-68da9edf7c41 req-bfef8030-9247-4386-bf43-3b5fe8c8bf6b service nova] Releasing lock "refresh_cache-571a5250-8655-4f30-b193-919affbc1bd8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.898627] env[62383]: DEBUG nova.network.neutron [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Updated VIF entry in instance network info cache for port d1a6d806-7723-4d98-843f-fe34d9a9f94c. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 569.899223] env[62383]: DEBUG nova.network.neutron [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Updating instance_info_cache with network_info: [{"id": "d1a6d806-7723-4d98-843f-fe34d9a9f94c", "address": "fa:16:3e:89:85:39", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1a6d806-77", "ovs_interfaceid": "d1a6d806-7723-4d98-843f-fe34d9a9f94c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.945927] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.975539] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451005, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.060177] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.062891] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.062891] env[62383]: DEBUG nova.network.neutron [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 570.155910] env[62383]: DEBUG nova.compute.manager [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 570.159718] env[62383]: DEBUG nova.scheduler.client.report [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 570.208429] env[62383]: DEBUG nova.virt.hardware [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 570.208737] env[62383]: DEBUG nova.virt.hardware [None 
req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 570.208904] env[62383]: DEBUG nova.virt.hardware [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 570.210097] env[62383]: DEBUG nova.virt.hardware [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 570.210288] env[62383]: DEBUG nova.virt.hardware [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 570.210438] env[62383]: DEBUG nova.virt.hardware [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 570.210651] env[62383]: DEBUG nova.virt.hardware [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 570.210849] env[62383]: DEBUG nova.virt.hardware [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 570.211046] env[62383]: DEBUG nova.virt.hardware [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 570.211212] env[62383]: DEBUG nova.virt.hardware [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 570.211379] env[62383]: DEBUG nova.virt.hardware [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 570.212403] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-aa8dad7c-6ebf-43a9-a98e-f7f075836687 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.230908] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6dd3a6-b2b1-49ca-95ae-0c1c9883262a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.264043] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52196ef3-495f-a3b9-ec00-a936b53964cd, 'name': SearchDatastore_Task, 'duration_secs': 0.021159} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.264566] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.264809] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 570.265075] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.265205] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.265505] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 570.265668] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df4c2876-f57d-4326-b6b9-35027f33f2d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.281212] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 570.281370] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 570.282128] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a79521c-e7c0-4e91-acf1-5e1947107096 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.289835] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 570.289835] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52225677-4f92-6755-20eb-7548f7e62c52" [ 570.289835] env[62383]: _type = "Task" [ 570.289835] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.300364] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52225677-4f92-6755-20eb-7548f7e62c52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.406515] env[62383]: DEBUG oslo_concurrency.lockutils [req-7fb63be9-768a-42d5-bd35-2e87c574482c req-98738115-f9f2-4baa-b045-b2df25f4ab5e service nova] Releasing lock "refresh_cache-a170fd95-3f7f-4315-a063-b9d02a7a1af4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.439031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquiring lock "f28beb17-8455-49d3-8be0-7636b9abe4e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.439031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lock "f28beb17-8455-49d3-8be0-7636b9abe4e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.477666] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451005, 'name': ExtendVirtualDisk_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.659035] env[62383]: DEBUG nova.network.neutron [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Instance cache missing network info. 
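The nova.virt.hardware entries at 570.208 through 570.211 above pick a CPU topology for the 1-vCPU m1.nano flavor with no limits or preferences set, which collapses to the single option of 1 socket x 1 core x 1 thread. A simplified version of that enumeration (Nova's real code also applies preferences and NUMA constraints):

```python
import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield every (sockets, cores, threads) combination whose product is
    exactly vcpus and that fits within the per-dimension maximums."""
    for s, c, t in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield VirtCPUTopology(s, c, t)


print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]  -- matches "Got 1 possible topologies"
```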
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 570.667031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.667759] env[62383]: DEBUG nova.compute.manager [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 570.671848] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.548s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.672208] env[62383]: DEBUG nova.objects.instance [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Lazy-loading 'resources' on Instance uuid 7b8c8c12-fcf3-4b54-ae22-3aead1344803 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 570.810114] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52225677-4f92-6755-20eb-7548f7e62c52, 'name': SearchDatastore_Task, 'duration_secs': 0.023158} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.811739] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-998a8d97-3041-4d40-8ede-825c151ffdf3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.822457] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 570.822457] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c65f14-6a40-b587-d11a-82734f31510f" [ 570.822457] env[62383]: _type = "Task" [ 570.822457] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.837395] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c65f14-6a40-b587-d11a-82734f31510f, 'name': SearchDatastore_Task, 'duration_secs': 0.010712} completed successfully. 
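The lockutils lines record how long each caller waited for and then held a named lock; the "compute_resources" lock above, for example, is held for 2.553s after an 11.548s wait. A rough sketch of capturing those timings around a named lock, using a plain threading.Lock rather than oslo.concurrency's fair and external lock modes:

```python
import contextlib
import threading
import time

_locks = {}
_registry_guard = threading.Lock()


@contextlib.contextmanager
def timed_lock(name, by):
    """Acquire a named lock and report waited/held durations, in the
    spirit of the oslo_concurrency.lockutils messages above."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, by, waited))
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, by, held))


# Usage:
with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.1)  # stand-in for the claim work
```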
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.837684] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.837927] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a170fd95-3f7f-4315-a063-b9d02a7a1af4/a170fd95-3f7f-4315-a063-b9d02a7a1af4.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 570.838214] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f678897-8673-47b0-a630-7ef9073a0976 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.849377] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 570.849377] env[62383]: value = "task-2451006" [ 570.849377] env[62383]: _type = "Task" [ 570.849377] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.859348] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451006, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.980821] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451005, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.016984} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.980821] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 570.980821] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c147008c-cb49-46b0-9c1b-12601db7b76b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.004863] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 571a5250-8655-4f30-b193-919affbc1bd8/571a5250-8655-4f30-b193-919affbc1bd8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 571.005139] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf1a938d-0803-4e9b-9f42-f1015d37ed4b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.025916] env[62383]: DEBUG nova.network.neutron [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance_info_cache with network_info: [{"id": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "address": "fa:16:3e:ab:2f:e4", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925071ab-96", "ovs_interfaceid": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.033588] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for the task: (returnval){ [ 571.033588] env[62383]: value = "task-2451007" [ 571.033588] env[62383]: _type = "Task" [ 571.033588] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.044303] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451007, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.178049] env[62383]: DEBUG nova.compute.utils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 571.181815] env[62383]: DEBUG nova.compute.manager [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 571.182054] env[62383]: DEBUG nova.network.neutron [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 571.256553] env[62383]: DEBUG nova.policy [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c59d21fc407f49acb7752b6053101ca1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fe96856d7cbb433981c53498b15cfef3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 571.366537] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451006, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.507308] env[62383]: DEBUG nova.compute.manager [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Received event network-vif-deleted-1992c731-9b69-4b2d-8da4-293986dba848 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 571.507308] env[62383]: DEBUG nova.compute.manager [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Received event network-vif-plugged-925071ab-96dd-4c80-901e-9dba6c4a5a9c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 571.507308] env[62383]: DEBUG oslo_concurrency.lockutils [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] Acquiring lock "8a165d96-f503-4bc5-bff4-e6a85201e137-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.507308] env[62383]: DEBUG oslo_concurrency.lockutils [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.507308] env[62383]: DEBUG oslo_concurrency.lockutils [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.507620] env[62383]: DEBUG nova.compute.manager [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] No waiting events found dispatching network-vif-plugged-925071ab-96dd-4c80-901e-9dba6c4a5a9c {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 571.507620] env[62383]: WARNING nova.compute.manager [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Received unexpected event network-vif-plugged-925071ab-96dd-4c80-901e-9dba6c4a5a9c for instance with vm_state building and task_state spawning. [ 571.507620] env[62383]: DEBUG nova.compute.manager [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Received event network-changed-925071ab-96dd-4c80-901e-9dba6c4a5a9c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 571.507620] env[62383]: DEBUG nova.compute.manager [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Refreshing instance network info cache due to event network-changed-925071ab-96dd-4c80-901e-9dba6c4a5a9c. 
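The WARNING at 571.507 above ("Received unexpected event network-vif-plugged-... for instance with vm_state building") is what happens when Neutron reports a port event before the compute thread has registered a waiter for it. A toy version of that register/pop pattern, using a module-level dict and threading.Event rather than Nova's InstanceEvents machinery:

```python
import threading

_waiters = {}          # (instance_uuid, event_name) -> threading.Event
_waiters_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    """Called by the spawning thread before it starts waiting."""
    ev = threading.Event()
    with _waiters_lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev


def pop_instance_event(instance_uuid, event_name):
    """Called when an external event arrives; None means 'unexpected'."""
    with _waiters_lock:
        return _waiters.pop((instance_uuid, event_name), None)


def handle_external_event(instance_uuid, event_name):
    ev = pop_instance_event(instance_uuid, event_name)
    if ev is None:
        print("WARNING Received unexpected event %s for instance %s"
              % (event_name, instance_uuid))
    else:
        ev.set()   # wake up the thread waiting in spawn()


# No waiter registered, so this logs the "unexpected event" warning:
handle_external_event("8a165d96-f503-4bc5-bff4-e6a85201e137",
                      "network-vif-plugged")
```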
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 571.507620] env[62383]: DEBUG oslo_concurrency.lockutils [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] Acquiring lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.529236] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.529582] env[62383]: DEBUG nova.compute.manager [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Instance network_info: |[{"id": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "address": "fa:16:3e:ab:2f:e4", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925071ab-96", "ovs_interfaceid": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 571.532756] env[62383]: DEBUG oslo_concurrency.lockutils [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] Acquired lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.532968] env[62383]: DEBUG nova.network.neutron [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Refreshing network info cache for port 925071ab-96dd-4c80-901e-9dba6c4a5a9c {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 571.534224] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:2f:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'925071ab-96dd-4c80-901e-9dba6c4a5a9c', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 571.548504] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating folder: Project (2439f3d802f34027b12d50f242a54ba3). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 571.552437] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5001a766-86fd-46d5-9531-2da6b68a77c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.561997] env[62383]: DEBUG nova.network.neutron [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Successfully updated port: 940c815a-1c07-492f-8b17-e4a57d123790 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 571.572125] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451007, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.573519] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Created folder: Project (2439f3d802f34027b12d50f242a54ba3) in parent group-v496304. [ 571.573701] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating folder: Instances. Parent ref: group-v496336. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 571.574054] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b206d64b-116b-4436-a35b-5095cc4d966c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.583775] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570f0534-8522-40a0-a647-1b803936e311 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.589859] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Created folder: Instances in parent group-v496336. [ 571.590270] env[62383]: DEBUG oslo.service.loopingcall [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 571.590982] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 571.591752] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1144c2ea-6e7d-4134-8f10-bb5a7c625091 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.610022] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38569ed8-bae0-4dff-a6c0-c31dd00c00d9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.614805] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 571.614805] env[62383]: value = "task-2451010" [ 571.614805] env[62383]: _type = "Task" [ 571.614805] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.648895] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b654114-c8fd-483c-9610-0328cdc49f71 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.654973] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451010, 'name': CreateVM_Task} progress is 15%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.660264] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ceb7933-023c-47e1-89c6-0518284265cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.678067] env[62383]: DEBUG nova.compute.provider_tree [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.684487] env[62383]: DEBUG nova.compute.manager [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 571.799259] env[62383]: DEBUG nova.network.neutron [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Successfully created port: 7969303b-5cee-496e-841d-a0a254ed01e3 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 571.861864] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451006, 'name': CopyVirtualDisk_Task} progress is 25%. 
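Before CreateVM_Task is invoked above, the driver builds a two-level folder hierarchy: a per-project folder under the configured parent (group-v496304 here), then an "Instances" folder inside it. A schematic, in-memory stand-in for that create-folder-if-missing behaviour (a real Folder.CreateFolder call faults if the name already exists, hence the "if missing" handling in the driver):

```python
import itertools

_ref_counter = itertools.count(496336)


def get_or_create_folder(inventory, parent_ref, name):
    """inventory maps a parent ref to {child name: child ref}; this is a
    toy stand-in for Folder.CreateFolder that simply reuses an existing
    child with the same name."""
    children = inventory.setdefault(parent_ref, {})
    if name not in children:
        children[name] = "group-v%d" % next(_ref_counter)
    return children[name]


inventory = {}
project_folder = get_or_create_folder(
    inventory, "group-v496304",
    "Project (2439f3d802f34027b12d50f242a54ba3)")
instances_folder = get_or_create_folder(inventory, project_folder, "Instances")
# CreateVM_Task would then be invoked with instances_folder as the VM's parent.
print(project_folder, instances_folder)
```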
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.067764] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquiring lock "refresh_cache-14bb9b79-d224-4a64-861e-30dd919c5741" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.068325] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquired lock "refresh_cache-14bb9b79-d224-4a64-861e-30dd919c5741" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.068325] env[62383]: DEBUG nova.network.neutron [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 572.069441] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451007, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.127988] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451010, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.142531] env[62383]: DEBUG nova.network.neutron [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updated VIF entry in instance network info cache for port 925071ab-96dd-4c80-901e-9dba6c4a5a9c. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 572.142531] env[62383]: DEBUG nova.network.neutron [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance_info_cache with network_info: [{"id": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "address": "fa:16:3e:ab:2f:e4", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925071ab-96", "ovs_interfaceid": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.183431] env[62383]: DEBUG nova.scheduler.client.report [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 572.364661] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451006, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.177129} completed successfully. 
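Entries 570.265 through 572.364 above trace the image-cache path for image cac3b430: make sure [datastore2] devstack-image-cache_base exists (FileManager.MakeDirectory), locate the cached VMDK (SearchDatastore_Task), then CopyVirtualDisk_Task it into the instance directory. Condensed into a sketch whose ds_exists / ds_mkdir / ds_copy callables are hypothetical helpers, not Nova functions:

```python
def prepare_instance_disk(session, image_id, instance_uuid,
                          ds_exists, ds_mkdir, ds_copy):
    """Mirror the MakeDirectory -> SearchDatastore -> CopyVirtualDisk
    sequence in the log. The ds_* callables are hypothetical datastore
    helpers; the Glance-download branch taken on a real cache miss is
    omitted here.
    """
    cache_dir = "[datastore2] devstack-image-cache_base"
    cached_vmdk = "%s/%s/%s.vmdk" % (cache_dir, image_id, image_id)
    target_vmdk = "[datastore2] %s/%s.vmdk" % (instance_uuid, instance_uuid)

    if not ds_exists(session, cache_dir):
        ds_mkdir(session, cache_dir)               # FileManager.MakeDirectory
    if not ds_exists(session, cached_vmdk):
        raise NotImplementedError("cache miss: would fetch %s from Glance"
                                  % image_id)
    ds_copy(session, cached_vmdk, target_vmdk)     # CopyVirtualDisk_Task
    return target_vmdk
```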
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.366473] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a170fd95-3f7f-4315-a063-b9d02a7a1af4/a170fd95-3f7f-4315-a063-b9d02a7a1af4.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 572.366757] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 572.369899] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ab227e0-bc8e-4d46-afcc-d84008f8a510 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.373328] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "13db2c17-ccba-4336-929a-0d01202c5143" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.373788] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "13db2c17-ccba-4336-929a-0d01202c5143" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.380764] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 572.380764] env[62383]: value = "task-2451011" [ 572.380764] env[62383]: _type = "Task" [ 572.380764] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.393788] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451011, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.575592] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451007, 'name': ReconfigVM_Task, 'duration_secs': 1.174737} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.575592] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 571a5250-8655-4f30-b193-919affbc1bd8/571a5250-8655-4f30-b193-919affbc1bd8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 572.575946] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c90d497b-e491-4d0c-8a4c-0f81ee20af31 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.583729] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for the task: (returnval){ [ 572.583729] env[62383]: value = "task-2451012" [ 572.583729] env[62383]: _type = "Task" [ 572.583729] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.594782] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451012, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.631149] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451010, 'name': CreateVM_Task, 'duration_secs': 0.568759} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.631406] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 572.632274] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 572.632390] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.632650] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 572.632912] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ed2ac35-d4c8-4ca5-8356-a66fd81726f0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.639361] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 572.639361] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522de586-0e34-8f9e-fcae-6190fc1cb662" [ 572.639361] env[62383]: _type = "Task" [ 572.639361] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.648686] env[62383]: DEBUG nova.network.neutron [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.651033] env[62383]: DEBUG oslo_concurrency.lockutils [req-6a87bfa2-0b01-4773-9c1a-ed505aeb21fa req-58fec6bb-6df9-4c64-86d7-ce7637ffe527 service nova] Releasing lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.656325] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522de586-0e34-8f9e-fcae-6190fc1cb662, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.696109] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.023s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.698026] env[62383]: DEBUG nova.compute.manager [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 572.700167] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.902s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.707620] env[62383]: INFO nova.compute.claims [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 572.734993] env[62383]: INFO nova.scheduler.client.report [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Deleted allocations for instance 7b8c8c12-fcf3-4b54-ae22-3aead1344803 [ 572.754512] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 572.754811] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 572.754887] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 
tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 572.755075] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 572.755227] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 572.755368] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 572.755576] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 572.755734] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 572.755897] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 572.756183] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 572.756387] env[62383]: DEBUG nova.virt.hardware [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 572.757279] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bd66d3-378a-4466-9504-5d457697c01f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.767127] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7d3ec3-b5a4-4718-a832-45f7903dc4b4 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.894563] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451011, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062323} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.895251] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 572.896457] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af874cbf-edf2-4535-b2a8-ffcc5839a64b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.925109] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] a170fd95-3f7f-4315-a063-b9d02a7a1af4/a170fd95-3f7f-4315-a063-b9d02a7a1af4.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 572.925109] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed8b5994-6745-4c64-b44a-055cdfd39447 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.939962] env[62383]: DEBUG nova.network.neutron [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Updating instance_info_cache with network_info: [{"id": "940c815a-1c07-492f-8b17-e4a57d123790", "address": "fa:16:3e:9a:67:f3", "network": {"id": "3f07f890-19a6-41bf-9948-3601561d0dae", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-243075880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e3f9cc0f33c4f6b9bee57e176d9048e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap940c815a-1c", "ovs_interfaceid": "940c815a-1c07-492f-8b17-e4a57d123790", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.947663] env[62383]: DEBUG oslo_vmware.api [None 
req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 572.947663] env[62383]: value = "task-2451013" [ 572.947663] env[62383]: _type = "Task" [ 572.947663] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.957334] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451013, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.097642] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451012, 'name': Rename_Task, 'duration_secs': 0.459248} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.097642] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 573.098582] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53d8f2cc-ac48-43ad-a437-946276064850 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.108072] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for the task: (returnval){ [ 573.108072] env[62383]: value = "task-2451014" [ 573.108072] env[62383]: _type = "Task" [ 573.108072] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.118558] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451014, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.151322] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522de586-0e34-8f9e-fcae-6190fc1cb662, 'name': SearchDatastore_Task, 'duration_secs': 0.074072} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.151711] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.151959] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 573.152378] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 573.152528] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.156021] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 573.156021] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3759a18-0abb-4a3e-abd3-0cd4df116899 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.163739] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 573.164016] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 573.164889] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0afde60d-7c76-4c52-95aa-00a8aa53568f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.173284] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 573.173284] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5238e582-b010-e938-4b22-9ee4b01ac810" [ 573.173284] env[62383]: _type = "Task" [ 573.173284] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.181684] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5238e582-b010-e938-4b22-9ee4b01ac810, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.251565] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4d488990-0fbf-431f-a5f9-1d0c854be810 tempest-DeleteServersAdminTestJSON-659502086 tempest-DeleteServersAdminTestJSON-659502086-project-admin] Lock "7b8c8c12-fcf3-4b54-ae22-3aead1344803" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.443s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.444713] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Releasing lock "refresh_cache-14bb9b79-d224-4a64-861e-30dd919c5741" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.444713] env[62383]: DEBUG nova.compute.manager [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Instance network_info: |[{"id": "940c815a-1c07-492f-8b17-e4a57d123790", "address": "fa:16:3e:9a:67:f3", "network": {"id": "3f07f890-19a6-41bf-9948-3601561d0dae", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-243075880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e3f9cc0f33c4f6b9bee57e176d9048e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap940c815a-1c", "ovs_interfaceid": "940c815a-1c07-492f-8b17-e4a57d123790", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 573.447019] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:67:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b107fab-ee71-47db-ad4d-3c6f05546843', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '940c815a-1c07-492f-8b17-e4a57d123790', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 573.457573] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Creating folder: Project (9e3f9cc0f33c4f6b9bee57e176d9048e). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 573.458713] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b30f88a0-cf32-43ea-ba86-431165f5cee4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.475095] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451013, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.476595] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Created folder: Project (9e3f9cc0f33c4f6b9bee57e176d9048e) in parent group-v496304. [ 573.477415] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Creating folder: Instances. Parent ref: group-v496339. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 573.477804] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b58963ac-54ee-4250-a173-47e35bc11e3c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.489041] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Created folder: Instances in parent group-v496339. [ 573.489041] env[62383]: DEBUG oslo.service.loopingcall [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 573.489041] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 573.489041] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e96d865-4a2d-44c0-8e48-0de2303beff5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.513649] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 573.513649] env[62383]: value = "task-2451017" [ 573.513649] env[62383]: _type = "Task" [ 573.513649] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.521195] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451017, 'name': CreateVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.624386] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451014, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.687055] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5238e582-b010-e938-4b22-9ee4b01ac810, 'name': SearchDatastore_Task, 'duration_secs': 0.011321} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.688176] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0adcacc3-ad53-4314-bc1c-28092e9f26f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.698134] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 573.698134] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c885e5-0a7c-aec8-f6fc-b62d5ac53a6d" [ 573.698134] env[62383]: _type = "Task" [ 573.698134] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.718448] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c885e5-0a7c-aec8-f6fc-b62d5ac53a6d, 'name': SearchDatastore_Task, 'duration_secs': 0.010579} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.720413] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.720413] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8a165d96-f503-4bc5-bff4-e6a85201e137/8a165d96-f503-4bc5-bff4-e6a85201e137.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 573.720413] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70b9a7fa-0c8f-49d6-b74f-ee1db9359454 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.730061] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 573.730061] env[62383]: value = "task-2451018" [ 573.730061] env[62383]: _type = "Task" [ 573.730061] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.740866] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451018, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.975030] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451013, 'name': ReconfigVM_Task, 'duration_secs': 0.629365} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.975030] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Reconfigured VM instance instance-0000000b to attach disk [datastore2] a170fd95-3f7f-4315-a063-b9d02a7a1af4/a170fd95-3f7f-4315-a063-b9d02a7a1af4.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 573.975030] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3fc1aec-862f-4af0-831c-312fe5a1af64 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.982599] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 573.982599] env[62383]: value = "task-2451019" [ 573.982599] env[62383]: _type = "Task" [ 573.982599] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.991352] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451019, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.028041] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451017, 'name': CreateVM_Task, 'duration_secs': 0.319614} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.028041] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 574.028938] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.029406] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.030045] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 574.030168] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4558b539-f39b-4bae-8451-1208b1d3f8b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.035743] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for the task: (returnval){ [ 574.035743] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528e1642-9e58-d547-b268-e3a5397d05a0" [ 574.035743] env[62383]: _type = "Task" [ 574.035743] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.050632] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528e1642-9e58-d547-b268-e3a5397d05a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.123614] env[62383]: DEBUG oslo_vmware.api [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451014, 'name': PowerOnVM_Task, 'duration_secs': 0.682985} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.127606] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 574.127837] env[62383]: INFO nova.compute.manager [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Took 11.47 seconds to spawn the instance on the hypervisor. [ 574.128030] env[62383]: DEBUG nova.compute.manager [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 574.129532] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82335450-7581-4da1-888f-a764e60d88fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.152704] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c311534-861a-4c8d-bf67-96bfcfb8b005 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.161785] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.162888] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.167685] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c8dc92-59b8-4eaf-8722-2abe65b9e0dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.207415] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d670cee-cf96-46ba-a5b0-b2e0a4febedd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.220241] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48246f1-d374-4fce-9821-8b207f0b40f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.237978] env[62383]: DEBUG nova.compute.provider_tree [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 
tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 574.251353] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451018, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.348107] env[62383]: DEBUG nova.network.neutron [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Successfully updated port: 7969303b-5cee-496e-841d-a0a254ed01e3 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 574.495406] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451019, 'name': Rename_Task, 'duration_secs': 0.429171} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.495676] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 574.495901] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f92f9ed-1fd1-4a6c-9b43-e934ddadadd4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.502925] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 574.502925] env[62383]: value = "task-2451020" [ 574.502925] env[62383]: _type = "Task" [ 574.502925] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.511321] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451020, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.546590] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528e1642-9e58-d547-b268-e3a5397d05a0, 'name': SearchDatastore_Task, 'duration_secs': 0.061345} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.546876] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.547855] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 574.547855] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.547855] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.548929] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 574.548929] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b459436-e98f-4cd3-9d15-0d5198eb5f5b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.571299] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 574.571499] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 574.572436] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1001fd6-1d11-47df-bc58-53e3283ad737 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.579553] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for the task: (returnval){ [ 574.579553] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e50296-b9d0-2fa3-42f0-475ff786ea20" [ 574.579553] env[62383]: _type = "Task" [ 574.579553] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.590537] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e50296-b9d0-2fa3-42f0-475ff786ea20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.653077] env[62383]: INFO nova.compute.manager [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Took 25.41 seconds to build instance. [ 574.685541] env[62383]: DEBUG nova.compute.manager [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Received event network-vif-plugged-940c815a-1c07-492f-8b17-e4a57d123790 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 574.685885] env[62383]: DEBUG oslo_concurrency.lockutils [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] Acquiring lock "14bb9b79-d224-4a64-861e-30dd919c5741-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.685961] env[62383]: DEBUG oslo_concurrency.lockutils [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] Lock "14bb9b79-d224-4a64-861e-30dd919c5741-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.686162] env[62383]: DEBUG oslo_concurrency.lockutils [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] Lock "14bb9b79-d224-4a64-861e-30dd919c5741-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.686499] env[62383]: DEBUG nova.compute.manager [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] No waiting events found dispatching network-vif-plugged-940c815a-1c07-492f-8b17-e4a57d123790 
{{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 574.686844] env[62383]: WARNING nova.compute.manager [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Received unexpected event network-vif-plugged-940c815a-1c07-492f-8b17-e4a57d123790 for instance with vm_state building and task_state spawning. [ 574.686919] env[62383]: DEBUG nova.compute.manager [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Received event network-changed-940c815a-1c07-492f-8b17-e4a57d123790 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 574.687099] env[62383]: DEBUG nova.compute.manager [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Refreshing instance network info cache due to event network-changed-940c815a-1c07-492f-8b17-e4a57d123790. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 574.687545] env[62383]: DEBUG oslo_concurrency.lockutils [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] Acquiring lock "refresh_cache-14bb9b79-d224-4a64-861e-30dd919c5741" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.687715] env[62383]: DEBUG oslo_concurrency.lockutils [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] Acquired lock "refresh_cache-14bb9b79-d224-4a64-861e-30dd919c5741" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.687861] env[62383]: DEBUG nova.network.neutron [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Refreshing network info cache for port 940c815a-1c07-492f-8b17-e4a57d123790 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 574.753785] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451018, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594499} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.754856] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8a165d96-f503-4bc5-bff4-e6a85201e137/8a165d96-f503-4bc5-bff4-e6a85201e137.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 574.754856] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 574.757245] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2e7ab24-348e-4c74-b117-b4db13ebca32 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.762893] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 574.762893] env[62383]: value = "task-2451021" [ 574.762893] env[62383]: _type = "Task" [ 574.762893] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.775569] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451021, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.779933] env[62383]: ERROR nova.scheduler.client.report [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [req-683621b8-c487-4dd8-8785-b9b9cacadb56] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-683621b8-c487-4dd8-8785-b9b9cacadb56"}]} [ 574.798676] env[62383]: DEBUG nova.scheduler.client.report [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 574.817785] env[62383]: DEBUG nova.scheduler.client.report [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 574.818131] env[62383]: DEBUG nova.compute.provider_tree [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 574.832999] env[62383]: DEBUG nova.scheduler.client.report [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 574.857628] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "refresh_cache-dd0ad4e3-a6e6-4258-b960-544984e24ebc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.857628] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquired lock "refresh_cache-dd0ad4e3-a6e6-4258-b960-544984e24ebc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.857628] env[62383]: DEBUG nova.network.neutron [None req-99002965-ed71-4bad-99cf-8e043a0573d2 
tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 574.859989] env[62383]: DEBUG nova.scheduler.client.report [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 574.984125] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquiring lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.984125] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.014475] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451020, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.092764] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e50296-b9d0-2fa3-42f0-475ff786ea20, 'name': SearchDatastore_Task, 'duration_secs': 0.027577} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.093719] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fb73ecf-eadd-4933-a15c-dc64cdf83fbf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.101852] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for the task: (returnval){ [ 575.101852] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52150821-b8c6-ed2b-dbdd-ae8aa3cf5f09" [ 575.101852] env[62383]: _type = "Task" [ 575.101852] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.117313] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52150821-b8c6-ed2b-dbdd-ae8aa3cf5f09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.156166] env[62383]: DEBUG oslo_concurrency.lockutils [None req-90d37cc7-aba3-457d-b2c1-b4dbabda8887 tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lock "571a5250-8655-4f30-b193-919affbc1bd8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.926s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.277796] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451021, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07882} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.278869] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 575.278992] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90114f2f-06a1-4cce-9468-1d145805fbf1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.304600] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 8a165d96-f503-4bc5-bff4-e6a85201e137/8a165d96-f503-4bc5-bff4-e6a85201e137.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 575.308941] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c12f7994-6fad-4815-9d34-1b9b92584263 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.325299] env[62383]: DEBUG nova.compute.manager [req-958888ec-5455-4933-902f-28d57a8ad47c req-cada6f80-3656-429b-ac65-62a8ace652cc service nova] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Received event network-vif-plugged-7969303b-5cee-496e-841d-a0a254ed01e3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 575.325348] env[62383]: DEBUG oslo_concurrency.lockutils [req-958888ec-5455-4933-902f-28d57a8ad47c req-cada6f80-3656-429b-ac65-62a8ace652cc service nova] Acquiring lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.325613] env[62383]: DEBUG oslo_concurrency.lockutils [req-958888ec-5455-4933-902f-28d57a8ad47c req-cada6f80-3656-429b-ac65-62a8ace652cc service nova] Lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.325868] env[62383]: DEBUG oslo_concurrency.lockutils [req-958888ec-5455-4933-902f-28d57a8ad47c req-cada6f80-3656-429b-ac65-62a8ace652cc service nova] Lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.325981] env[62383]: DEBUG nova.compute.manager [req-958888ec-5455-4933-902f-28d57a8ad47c req-cada6f80-3656-429b-ac65-62a8ace652cc service nova] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] No waiting events found dispatching network-vif-plugged-7969303b-5cee-496e-841d-a0a254ed01e3 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 575.326159] env[62383]: WARNING nova.compute.manager [req-958888ec-5455-4933-902f-28d57a8ad47c req-cada6f80-3656-429b-ac65-62a8ace652cc service nova] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Received unexpected event network-vif-plugged-7969303b-5cee-496e-841d-a0a254ed01e3 for instance with vm_state building and task_state spawning. [ 575.331013] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "60535a30-4602-4063-94a4-30ed01266d5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.331013] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "60535a30-4602-4063-94a4-30ed01266d5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 575.336455] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 575.336455] env[62383]: value = "task-2451022" [ 575.336455] env[62383]: _type = "Task" [ 575.336455] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.350955] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451022, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.421377] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ee162b-4175-4448-bb91-7656aa12f22a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.436107] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c68d8b1-6518-4aef-bcdf-646fc961bdd6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.469017] env[62383]: DEBUG nova.network.neutron [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 575.471978] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc56e80a-e856-445f-b5cb-7facae2c5445 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.484697] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a64dfa-f841-4b88-8a1c-44b65dbcf66b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.506819] env[62383]: DEBUG nova.compute.provider_tree [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 575.521114] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451020, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.601687] env[62383]: DEBUG nova.network.neutron [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Updated VIF entry in instance network info cache for port 940c815a-1c07-492f-8b17-e4a57d123790. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 575.601990] env[62383]: DEBUG nova.network.neutron [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Updating instance_info_cache with network_info: [{"id": "940c815a-1c07-492f-8b17-e4a57d123790", "address": "fa:16:3e:9a:67:f3", "network": {"id": "3f07f890-19a6-41bf-9948-3601561d0dae", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-243075880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e3f9cc0f33c4f6b9bee57e176d9048e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap940c815a-1c", "ovs_interfaceid": "940c815a-1c07-492f-8b17-e4a57d123790", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.636633] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52150821-b8c6-ed2b-dbdd-ae8aa3cf5f09, 'name': SearchDatastore_Task, 'duration_secs': 0.030159} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.636633] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.636633] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 14bb9b79-d224-4a64-861e-30dd919c5741/14bb9b79-d224-4a64-861e-30dd919c5741.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 575.636633] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d404b13-e40b-408b-9480-af0e91f5493a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.636633] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for the task: (returnval){ [ 575.637245] env[62383]: value = "task-2451023" [ 575.637245] env[62383]: _type = "Task" [ 575.637245] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.648738] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451023, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.666203] env[62383]: DEBUG nova.compute.manager [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 575.730473] env[62383]: DEBUG nova.network.neutron [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Updating instance_info_cache with network_info: [{"id": "7969303b-5cee-496e-841d-a0a254ed01e3", "address": "fa:16:3e:c8:6d:55", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7969303b-5c", "ovs_interfaceid": "7969303b-5cee-496e-841d-a0a254ed01e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.848992] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451022, 'name': ReconfigVM_Task, 'duration_secs': 0.442326} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.849316] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 8a165d96-f503-4bc5-bff4-e6a85201e137/8a165d96-f503-4bc5-bff4-e6a85201e137.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 575.850175] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43581956-ad8c-4dd0-b512-b05c4bdbc8d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.857339] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 575.857339] env[62383]: value = "task-2451024" [ 575.857339] env[62383]: _type = "Task" [ 575.857339] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.867203] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451024, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.020718] env[62383]: DEBUG oslo_vmware.api [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451020, 'name': PowerOnVM_Task, 'duration_secs': 1.036978} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.021191] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 576.021191] env[62383]: INFO nova.compute.manager [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Took 10.90 seconds to spawn the instance on the hypervisor. [ 576.021366] env[62383]: DEBUG nova.compute.manager [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 576.022288] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b4a381-145b-402a-b2c5-ef2016062968 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.065028] env[62383]: DEBUG nova.scheduler.client.report [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 33 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 576.066638] env[62383]: DEBUG nova.compute.provider_tree [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 33 to 34 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 576.066638] env[62383]: DEBUG nova.compute.provider_tree [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 576.114138] env[62383]: DEBUG oslo_concurrency.lockutils [req-e543cffd-7590-484a-8f59-ea440378ce02 req-d924ce94-b0e4-41c4-b835-0089b15bd5f2 service nova] Releasing lock "refresh_cache-14bb9b79-d224-4a64-861e-30dd919c5741" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.147435] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451023, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.199185] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.234348] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Releasing lock "refresh_cache-dd0ad4e3-a6e6-4258-b960-544984e24ebc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 576.234729] env[62383]: DEBUG nova.compute.manager [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Instance network_info: |[{"id": "7969303b-5cee-496e-841d-a0a254ed01e3", "address": "fa:16:3e:c8:6d:55", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7969303b-5c", "ovs_interfaceid": "7969303b-5cee-496e-841d-a0a254ed01e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 576.236243] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:6d:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7969303b-5cee-496e-841d-a0a254ed01e3', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 576.243646] env[62383]: DEBUG oslo.service.loopingcall [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 576.244117] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 576.244976] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-659365a5-0357-4c34-a559-fedf2d92ed89 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.266519] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 576.266519] env[62383]: value = "task-2451025" [ 576.266519] env[62383]: _type = "Task" [ 576.266519] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.276449] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451025, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.368678] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451024, 'name': Rename_Task, 'duration_secs': 0.169023} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.369092] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 576.369389] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0734e80-684d-4225-9bbe-610b9bc11748 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.377262] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 576.377262] env[62383]: value = "task-2451026" [ 576.377262] env[62383]: _type = "Task" [ 576.377262] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.392117] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451026, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.548172] env[62383]: INFO nova.compute.manager [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Took 25.67 seconds to build instance. [ 576.571619] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.871s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.572442] env[62383]: DEBUG nova.compute.manager [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 576.577944] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.459s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.584025] env[62383]: INFO nova.compute.claims [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.660637] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451023, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.675955} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.667286] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 14bb9b79-d224-4a64-861e-30dd919c5741/14bb9b79-d224-4a64-861e-30dd919c5741.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 576.667512] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 576.667821] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-03168627-908f-4624-9901-e2a4f83659a7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.676803] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for the task: (returnval){ [ 576.676803] env[62383]: value = "task-2451027" [ 576.676803] env[62383]: _type = "Task" [ 576.676803] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.686773] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451027, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.780335] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451025, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.788227] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "045e5f8f-edd5-425d-bccb-054d90db27d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.789496] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "045e5f8f-edd5-425d-bccb-054d90db27d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.889994] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451026, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.057342] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1501de98-1df7-48b2-ac69-176c10c4b1d2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.975s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.088241] env[62383]: DEBUG nova.compute.utils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 577.092708] env[62383]: DEBUG nova.compute.manager [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 577.092908] env[62383]: DEBUG nova.network.neutron [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 577.098336] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "152567ba-f24c-4674-b06e-98c76a3da324" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.098839] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "152567ba-f24c-4674-b06e-98c76a3da324" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.170169] env[62383]: DEBUG nova.policy [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bcce66563194bab86486a66106ef770', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75346deaf9ad40fa925d4aff9fdff2cc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 577.195700] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451027, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.286166] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451025, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.388070] env[62383]: DEBUG oslo_vmware.api [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451026, 'name': PowerOnVM_Task, 'duration_secs': 0.514816} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.388332] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 577.388559] env[62383]: INFO nova.compute.manager [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Took 9.70 seconds to spawn the instance on the hypervisor. [ 577.388795] env[62383]: DEBUG nova.compute.manager [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 577.389584] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5310db8-6abd-4830-b272-77beae0483cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.561137] env[62383]: DEBUG nova.compute.manager [None req-d4d19eca-d4ab-48d7-a88e-d1b6c6391292 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 905304d9-0288-442a-a024-625dc212e5b2] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 577.593717] env[62383]: DEBUG nova.compute.manager [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 577.687187] env[62383]: DEBUG nova.network.neutron [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Successfully created port: cc45bc62-e82d-40dc-b803-56b790aca5d4 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 577.692557] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451027, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.777841] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451025, 'name': CreateVM_Task, 'duration_secs': 1.476545} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.778026] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 577.779310] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 577.779310] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.779465] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 577.779706] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ba135e3-a53e-4c69-9968-ce83fdc91403 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.785277] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 577.785277] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52691155-e410-9d9b-f4ae-8c6c3d9e2546" [ 577.785277] env[62383]: _type = "Task" [ 577.785277] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.795902] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52691155-e410-9d9b-f4ae-8c6c3d9e2546, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.909871] env[62383]: INFO nova.compute.manager [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Took 26.13 seconds to build instance. 
[ 578.009684] env[62383]: DEBUG nova.compute.manager [req-47a81ccf-6d7a-45e3-9b2a-fc973ded6b64 req-c095c0b6-6d20-4cb4-bbc2-6479e852b622 service nova] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Received event network-changed-7969303b-5cee-496e-841d-a0a254ed01e3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 578.010021] env[62383]: DEBUG nova.compute.manager [req-47a81ccf-6d7a-45e3-9b2a-fc973ded6b64 req-c095c0b6-6d20-4cb4-bbc2-6479e852b622 service nova] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Refreshing instance network info cache due to event network-changed-7969303b-5cee-496e-841d-a0a254ed01e3. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 578.010104] env[62383]: DEBUG oslo_concurrency.lockutils [req-47a81ccf-6d7a-45e3-9b2a-fc973ded6b64 req-c095c0b6-6d20-4cb4-bbc2-6479e852b622 service nova] Acquiring lock "refresh_cache-dd0ad4e3-a6e6-4258-b960-544984e24ebc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.010279] env[62383]: DEBUG oslo_concurrency.lockutils [req-47a81ccf-6d7a-45e3-9b2a-fc973ded6b64 req-c095c0b6-6d20-4cb4-bbc2-6479e852b622 service nova] Acquired lock "refresh_cache-dd0ad4e3-a6e6-4258-b960-544984e24ebc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.010896] env[62383]: DEBUG nova.network.neutron [req-47a81ccf-6d7a-45e3-9b2a-fc973ded6b64 req-c095c0b6-6d20-4cb4-bbc2-6479e852b622 service nova] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Refreshing network info cache for port 7969303b-5cee-496e-841d-a0a254ed01e3 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 578.066350] env[62383]: DEBUG nova.compute.manager [None req-d4d19eca-d4ab-48d7-a88e-d1b6c6391292 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 905304d9-0288-442a-a024-625dc212e5b2] Instance disappeared before build. 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 578.094584] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6e750d-3de6-4ff1-ae83-744c10c1601c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.109849] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288f6942-0a9b-458c-a7b9-3690c5273db9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.153667] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1795bf2e-7975-473d-a320-677355640fec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.161752] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ad7b04-9493-48c3-adbc-0403c3f1f099 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.180196] env[62383]: DEBUG nova.compute.provider_tree [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.191632] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451027, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.171718} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.192792] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 578.193012] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957e8055-5433-4c9c-80dc-47b44e0329b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.223127] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 14bb9b79-d224-4a64-861e-30dd919c5741/14bb9b79-d224-4a64-861e-30dd919c5741.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 578.223901] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c84148a3-dbf4-4ff3-a54a-a442bb2d9649 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.246086] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for the task: (returnval){ [ 578.246086] env[62383]: value = "task-2451028" [ 578.246086] env[62383]: _type = "Task" [ 578.246086] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.254388] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451028, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.300623] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52691155-e410-9d9b-f4ae-8c6c3d9e2546, 'name': SearchDatastore_Task, 'duration_secs': 0.019553} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.301108] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.303604] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 578.303604] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 578.303604] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.303604] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 578.303604] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03a41f35-13d5-4f08-92f0-50e2d2de9065 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.312464] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 578.312712] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 578.313573] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a81257e-0916-46d0-822d-ec68116cf1e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.318850] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 578.318850] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524227e2-e800-dde6-c3f2-09d9a1116770" [ 578.318850] env[62383]: _type = "Task" [ 578.318850] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.327128] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524227e2-e800-dde6-c3f2-09d9a1116770, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.414371] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5135b7fe-a1d4-4734-9ad3-4464eaea64f6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.645s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.587982] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d4d19eca-d4ab-48d7-a88e-d1b6c6391292 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "905304d9-0288-442a-a024-625dc212e5b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.310s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.610620] env[62383]: DEBUG nova.compute.manager [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 578.650483] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:24:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1670368632',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-954110186',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 578.650734] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.650885] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 578.651080] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.651229] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 578.651389] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 578.651666] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 578.651854] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 
tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 578.652172] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 578.652441] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 578.652717] env[62383]: DEBUG nova.virt.hardware [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 578.653734] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a009b7e-2c17-44c7-be0f-55b8cbe33046 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.666901] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a58370-f5be-451f-9cdd-8663a7c608e7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.687468] env[62383]: DEBUG nova.scheduler.client.report [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 578.754697] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451028, 'name': ReconfigVM_Task, 'duration_secs': 0.384774} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.756358] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 14bb9b79-d224-4a64-861e-30dd919c5741/14bb9b79-d224-4a64-861e-30dd919c5741.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 578.756358] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea57b477-c396-41ed-a6b9-4ee66a4d71c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.765160] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for the task: (returnval){ [ 578.765160] env[62383]: value = "task-2451029" [ 578.765160] env[62383]: _type = "Task" [ 578.765160] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.775185] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451029, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.830524] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524227e2-e800-dde6-c3f2-09d9a1116770, 'name': SearchDatastore_Task, 'duration_secs': 0.012846} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.830706] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84a36c32-217c-458a-87fc-9206b8c2019d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.837481] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 578.837481] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525e8101-d2c5-4de3-22db-6ebaf83c7686" [ 578.837481] env[62383]: _type = "Task" [ 578.837481] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.848869] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525e8101-d2c5-4de3-22db-6ebaf83c7686, 'name': SearchDatastore_Task} progress is 0%. 
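
The "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" sequences above come from oslo.vmware's task polling. Below is a simplified stand-in for that loop; get_task_info is a hypothetical callable replacing the PropertyCollector query the real library performs.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a vSphere-style task until it finishes, mirroring the
        'Waiting for the task ... / progress is N% / completed successfully'
        sequence in the log. get_task_info is a hypothetical helper."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()  # e.g. {"state": "running", "progress": 33}
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            print("progress is %s%%" % info.get("progress", 0))
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %ss" % timeout)
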
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.849933] env[62383]: DEBUG nova.network.neutron [req-47a81ccf-6d7a-45e3-9b2a-fc973ded6b64 req-c095c0b6-6d20-4cb4-bbc2-6479e852b622 service nova] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Updated VIF entry in instance network info cache for port 7969303b-5cee-496e-841d-a0a254ed01e3. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 578.850285] env[62383]: DEBUG nova.network.neutron [req-47a81ccf-6d7a-45e3-9b2a-fc973ded6b64 req-c095c0b6-6d20-4cb4-bbc2-6479e852b622 service nova] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Updating instance_info_cache with network_info: [{"id": "7969303b-5cee-496e-841d-a0a254ed01e3", "address": "fa:16:3e:c8:6d:55", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7969303b-5c", "ovs_interfaceid": "7969303b-5cee-496e-841d-a0a254ed01e3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.918068] env[62383]: DEBUG nova.compute.manager [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 578.962851] env[62383]: INFO nova.compute.manager [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Rebuilding instance [ 579.012744] env[62383]: DEBUG nova.compute.manager [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 579.014064] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d930eec1-66af-4573-927a-718d43a4174e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.091120] env[62383]: DEBUG nova.compute.manager [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Starting instance... 
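
The instance_info_cache update above dumps the full network_info structure for port 7969303b-5cee-496e-841d-a0a254ed01e3. When reading such entries it usually suffices to pull out a few fields; the helper below does that for the structure exactly as logged (plain dictionaries, no Nova objects assumed).

    def summarize_vifs(network_info):
        """Reduce a network_info list (the structure dumped above) to the fields
        usually needed when debugging: port id, MAC, fixed IPs and tap device."""
        summary = []
        for vif in network_info:
            fixed_ips = [ip["address"]
                         for subnet in vif["network"]["subnets"]
                         for ip in subnet["ips"]]
            summary.append({
                "port_id": vif["id"],
                "mac": vif["address"],
                "fixed_ips": fixed_ips,
                "devname": vif.get("devname"),
            })
        return summary

    # For the cache entry above this yields port 7969303b-5cee-496e-841d-a0a254ed01e3,
    # MAC fa:16:3e:c8:6d:55, fixed IP 192.168.233.101 on devname tap7969303b-5c.
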
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 579.192920] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.194029] env[62383]: DEBUG nova.compute.manager [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 579.200403] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.855s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.200403] env[62383]: DEBUG nova.objects.instance [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lazy-loading 'resources' on Instance uuid ab338058-13c8-4df9-ba55-fabe1952557d {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 579.275323] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451029, 'name': Rename_Task, 'duration_secs': 0.416344} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.276285] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 579.276285] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75df5f55-b2fc-42ce-9ebf-8a2caacb3d8e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.286110] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for the task: (returnval){ [ 579.286110] env[62383]: value = "task-2451030" [ 579.286110] env[62383]: _type = "Task" [ 579.286110] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.303056] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451030, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.352842] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.353127] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.353631] env[62383]: DEBUG oslo_concurrency.lockutils [req-47a81ccf-6d7a-45e3-9b2a-fc973ded6b64 req-c095c0b6-6d20-4cb4-bbc2-6479e852b622 service nova] Releasing lock "refresh_cache-dd0ad4e3-a6e6-4258-b960-544984e24ebc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.354037] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525e8101-d2c5-4de3-22db-6ebaf83c7686, 'name': SearchDatastore_Task, 'duration_secs': 0.014923} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.355308] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.355308] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] dd0ad4e3-a6e6-4258-b960-544984e24ebc/dd0ad4e3-a6e6-4258-b960-544984e24ebc.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 579.355308] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58ece37b-da4e-437f-912c-18d2c43a7976 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.362046] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 579.362046] env[62383]: value = "task-2451031" [ 579.362046] env[62383]: _type = "Task" [ 579.362046] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.372039] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451031, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.449792] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.623009] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.688601] env[62383]: DEBUG nova.network.neutron [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Successfully updated port: cc45bc62-e82d-40dc-b803-56b790aca5d4 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 579.701649] env[62383]: DEBUG nova.compute.utils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 579.702942] env[62383]: DEBUG nova.compute.manager [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 579.703087] env[62383]: DEBUG nova.network.neutron [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 579.787424] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquiring lock "571a5250-8655-4f30-b193-919affbc1bd8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.787424] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lock "571a5250-8655-4f30-b193-919affbc1bd8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.787424] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquiring lock "571a5250-8655-4f30-b193-919affbc1bd8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.787565] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lock "571a5250-8655-4f30-b193-919affbc1bd8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.789969] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lock "571a5250-8655-4f30-b193-919affbc1bd8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.793649] env[62383]: INFO nova.compute.manager [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Terminating instance [ 579.811075] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451030, 'name': PowerOnVM_Task} progress is 66%. 
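
The lock lines above ("Acquiring lock ... by ...", "acquired ... waited Ns", "released ... held Ns") are emitted by oslo.concurrency's lock wrapper: a per-instance lock plus a companion "<uuid>-events" lock around lifecycle operations, and a "compute_resources" lock around resource-tracker claims. A minimal usage sketch follows; do_terminate_instance is a hypothetical placeholder, not Nova's code.

    from oslo_concurrency import lockutils

    def terminate(instance_uuid):
        # Serialize lifecycle operations per instance, as the lock names in the
        # log suggest; the wrapped function name is a placeholder.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            pass  # critical section: one concurrent operation per instance
        return do_terminate_instance()

    # The same primitive as a context manager:
    with lockutils.lock("compute_resources"):
        pass  # e.g. a resource-tracker claim or usage update
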
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.812437] env[62383]: DEBUG nova.policy [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '226e0b9e27f049fda90e3b85780ef88a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '189ee1f3f4c8461baa6dcc713b5f36f4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 579.883867] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451031, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.984218] env[62383]: DEBUG nova.compute.manager [req-dc5d22a1-efc4-4c5c-b2e8-2779b6b28438 req-48397933-bab7-4212-ad28-86a66eb48807 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Received event network-changed-925071ab-96dd-4c80-901e-9dba6c4a5a9c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 579.984218] env[62383]: DEBUG nova.compute.manager [req-dc5d22a1-efc4-4c5c-b2e8-2779b6b28438 req-48397933-bab7-4212-ad28-86a66eb48807 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Refreshing instance network info cache due to event network-changed-925071ab-96dd-4c80-901e-9dba6c4a5a9c. 
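
The "Policy check for network:attach_external_network failed" entry near the top of this block shows a member-scoped request being rejected by oslo.policy. A minimal sketch of such a check is below; the "role:admin" check string is an assumption for illustration only, not Nova's actual default for this rule.

    from oslo_config import cfg
    from oslo_policy import policy

    cfg.CONF([], project="policy-demo")  # initialize oslo.config with no CLI args
    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    # Credentials shaped like the ones logged above: reader/member roles only.
    creds = {"roles": ["reader", "member"],
             "project_id": "189ee1f3f4c8461baa6dcc713b5f36f4"}
    print(enforcer.enforce("network:attach_external_network", {}, creds))  # False
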
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 579.984595] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc5d22a1-efc4-4c5c-b2e8-2779b6b28438 req-48397933-bab7-4212-ad28-86a66eb48807 service nova] Acquiring lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.984595] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc5d22a1-efc4-4c5c-b2e8-2779b6b28438 req-48397933-bab7-4212-ad28-86a66eb48807 service nova] Acquired lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.987046] env[62383]: DEBUG nova.network.neutron [req-dc5d22a1-efc4-4c5c-b2e8-2779b6b28438 req-48397933-bab7-4212-ad28-86a66eb48807 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Refreshing network info cache for port 925071ab-96dd-4c80-901e-9dba6c4a5a9c {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 580.030785] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 580.031126] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1023538e-5243-4758-a09d-7cddf4d1742b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.038698] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 580.038698] env[62383]: value = "task-2451032" [ 580.038698] env[62383]: _type = "Task" [ 580.038698] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.060583] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451032, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.191990] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "refresh_cache-67d41910-54e1-48f1-b0d3-f34a62595ef2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 580.192273] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquired lock "refresh_cache-67d41910-54e1-48f1-b0d3-f34a62595ef2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.192273] env[62383]: DEBUG nova.network.neutron [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 580.212237] env[62383]: DEBUG nova.compute.manager [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 580.306740] env[62383]: DEBUG nova.compute.manager [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 580.307356] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 580.319160] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f9211a-655c-49ee-a479-2b2a38c39b32 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.322490] env[62383]: DEBUG oslo_vmware.api [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451030, 'name': PowerOnVM_Task, 'duration_secs': 0.801037} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.325178] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 580.325387] env[62383]: INFO nova.compute.manager [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Took 10.17 seconds to spawn the instance on the hypervisor. [ 580.326836] env[62383]: DEBUG nova.compute.manager [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 580.326971] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3bd341-b87f-41a3-8b7b-81fcf94c746c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.332595] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 580.333241] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12eb6026-fdd8-447b-b86e-a5d40bf0b9da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.350712] env[62383]: DEBUG oslo_vmware.api [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for the task: (returnval){ [ 580.350712] env[62383]: value = "task-2451033" [ 580.350712] env[62383]: _type = "Task" [ 580.350712] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.360478] env[62383]: DEBUG oslo_vmware.api [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451033, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.375721] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451031, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564378} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.376116] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] dd0ad4e3-a6e6-4258-b960-544984e24ebc/dd0ad4e3-a6e6-4258-b960-544984e24ebc.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 580.378356] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 580.378953] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b901c5a1-a16d-4a8e-8290-d2827dfa40ef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.386493] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8437f8b6-17bb-4d04-a704-77946fc23136 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.395096] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 580.395096] env[62383]: value = "task-2451034" [ 580.395096] env[62383]: _type = "Task" [ 580.395096] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.406144] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5d6fcd-af8c-4cc4-8bfe-47b0a4112a2b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.422816] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451034, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.455833] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11013f8-9aff-4ab7-bb67-c35e993eeced {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.463518] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfbd8af-5651-4173-9f90-d84633e7eead {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.479838] env[62383]: DEBUG nova.compute.provider_tree [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.549132] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451032, 'name': PowerOffVM_Task, 'duration_secs': 0.258347} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.549499] env[62383]: DEBUG nova.network.neutron [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Successfully created port: f072e9db-418e-4a2d-a8a0-3d6d74444bb7 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 580.551381] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 580.551731] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 580.552464] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0addfd87-b3aa-4c86-bea8-34e85d2450d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.559587] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 580.559831] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99a240df-ee4a-465a-a2a2-3864781bc947 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.638561] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 
tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 580.638791] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 580.638997] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleting the datastore file [datastore2] a170fd95-3f7f-4315-a063-b9d02a7a1af4 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 580.639244] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a86ef29c-197d-432d-8347-ebd8e9c40a05 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.644975] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 580.644975] env[62383]: value = "task-2451036" [ 580.644975] env[62383]: _type = "Task" [ 580.644975] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.654516] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451036, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.772447] env[62383]: DEBUG nova.network.neutron [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 580.856543] env[62383]: INFO nova.compute.manager [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Took 25.43 seconds to build instance. [ 580.863294] env[62383]: DEBUG oslo_vmware.api [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451033, 'name': PowerOffVM_Task, 'duration_secs': 0.171822} completed successfully. 
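
The rebuild and terminate paths above share the same teardown order: power off the VM, unregister it, then delete its directory from the datastore. A schematic sketch of that sequence, assuming an oslo.vmware VMwareAPISession-style object named session (this is not Nova's vmops/ds_util code verbatim):

    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        # "Powering off the VM" -> "Powered off the VM"
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        session.wait_for_task(task)

        # "Unregistering the VM" -> "Unregistered the VM" (no task returned)
        session.invoke_api(session.vim, "UnregisterVM", vm_ref)

        # "Deleting the datastore file [datastore2] <uuid>" -> "Deleted the datastore file"
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)
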
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.865715] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 580.865892] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 580.866154] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3a07d46-5981-425c-b06a-faeb184035c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.905363] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451034, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072282} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.905641] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 580.906348] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28d8faf-1979-4ba4-ba4b-935b7bdbe145 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.929253] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] dd0ad4e3-a6e6-4258-b960-544984e24ebc/dd0ad4e3-a6e6-4258-b960-544984e24ebc.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 580.930591] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b016761-8cbf-4959-bcd8-20e065cbeeea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.946845] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 580.947069] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Deleting 
contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 580.947247] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Deleting the datastore file [datastore2] 571a5250-8655-4f30-b193-919affbc1bd8 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 580.950439] env[62383]: DEBUG nova.network.neutron [req-dc5d22a1-efc4-4c5c-b2e8-2779b6b28438 req-48397933-bab7-4212-ad28-86a66eb48807 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updated VIF entry in instance network info cache for port 925071ab-96dd-4c80-901e-9dba6c4a5a9c. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 580.950746] env[62383]: DEBUG nova.network.neutron [req-dc5d22a1-efc4-4c5c-b2e8-2779b6b28438 req-48397933-bab7-4212-ad28-86a66eb48807 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance_info_cache with network_info: [{"id": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "address": "fa:16:3e:ab:2f:e4", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925071ab-96", "ovs_interfaceid": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 580.952147] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07d30d5b-b86e-44e6-94d9-69e310bd579d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.958412] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 580.958412] env[62383]: value = "task-2451038" [ 580.958412] env[62383]: _type = "Task" [ 580.958412] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.959699] env[62383]: DEBUG oslo_vmware.api [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for the task: (returnval){ [ 580.959699] env[62383]: value = "task-2451039" [ 580.959699] env[62383]: _type = "Task" [ 580.959699] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.971201] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451038, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.975399] env[62383]: DEBUG oslo_vmware.api [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451039, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.982995] env[62383]: DEBUG nova.scheduler.client.report [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 581.156131] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451036, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173499} completed successfully. 
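
The report-client entries repeat the provider inventory unchanged. The capacity the scheduler can place against is (total - reserved) * allocation_ratio per resource class, while max_unit additionally caps a single allocation (16 VCPUs here); a quick check of the numbers in the inventory above:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
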
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.156131] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 581.156131] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 581.156131] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 581.189020] env[62383]: DEBUG nova.network.neutron [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Updating instance_info_cache with network_info: [{"id": "cc45bc62-e82d-40dc-b803-56b790aca5d4", "address": "fa:16:3e:c5:92:47", "network": {"id": "15922417-5941-4372-b068-7f6b0a8c7335", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1040646301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75346deaf9ad40fa925d4aff9fdff2cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc45bc62-e8", "ovs_interfaceid": "cc45bc62-e82d-40dc-b803-56b790aca5d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.230315] env[62383]: DEBUG nova.compute.manager [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 581.267513] env[62383]: DEBUG nova.virt.hardware [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 581.267513] env[62383]: DEBUG nova.virt.hardware [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.267513] env[62383]: DEBUG nova.virt.hardware [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 581.267714] env[62383]: DEBUG nova.virt.hardware [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.267714] env[62383]: DEBUG nova.virt.hardware [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 581.267714] env[62383]: DEBUG nova.virt.hardware [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 581.267714] env[62383]: DEBUG nova.virt.hardware [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 581.268076] env[62383]: DEBUG nova.virt.hardware [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 581.268076] env[62383]: DEBUG nova.virt.hardware [None 
req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 581.268189] env[62383]: DEBUG nova.virt.hardware [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 581.268376] env[62383]: DEBUG nova.virt.hardware [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 581.269519] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d82613-8008-4c80-87cb-cc86a8c668fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.282035] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ee1ee6-70b4-45ab-8e07-6ac0348f3263 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.360959] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f067a2a7-fae5-48da-9df9-fa2a13a1115a tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lock "14bb9b79-d224-4a64-861e-30dd919c5741" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.941s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.455189] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc5d22a1-efc4-4c5c-b2e8-2779b6b28438 req-48397933-bab7-4212-ad28-86a66eb48807 service nova] Releasing lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.472941] env[62383]: DEBUG oslo_vmware.api [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Task: {'id': task-2451039, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165849} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.476318] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 581.476574] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 581.476900] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 581.477125] env[62383]: INFO nova.compute.manager [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Took 1.17 seconds to destroy the instance on the hypervisor. [ 581.477464] env[62383]: DEBUG oslo.service.loopingcall [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 581.477674] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451038, 'name': ReconfigVM_Task, 'duration_secs': 0.288594} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.477904] env[62383]: DEBUG nova.compute.manager [-] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 581.478043] env[62383]: DEBUG nova.network.neutron [-] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 581.480121] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Reconfigured VM instance instance-0000000e to attach disk [datastore2] dd0ad4e3-a6e6-4258-b960-544984e24ebc/dd0ad4e3-a6e6-4258-b960-544984e24ebc.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 581.480487] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ac78ade-0281-4102-af19-8f7620859663 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.487028] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 581.487028] env[62383]: value = "task-2451040" [ 581.487028] env[62383]: _type = "Task" [ 581.487028] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.491345] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.294s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.493437] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.426s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.495474] env[62383]: INFO nova.compute.claims [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.506872] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451040, 'name': Rename_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.519688] env[62383]: INFO nova.scheduler.client.report [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Deleted allocations for instance ab338058-13c8-4df9-ba55-fabe1952557d [ 581.691915] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Releasing lock "refresh_cache-67d41910-54e1-48f1-b0d3-f34a62595ef2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 581.691915] env[62383]: DEBUG nova.compute.manager [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Instance network_info: |[{"id": "cc45bc62-e82d-40dc-b803-56b790aca5d4", "address": "fa:16:3e:c5:92:47", "network": {"id": "15922417-5941-4372-b068-7f6b0a8c7335", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1040646301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75346deaf9ad40fa925d4aff9fdff2cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc45bc62-e8", "ovs_interfaceid": "cc45bc62-e82d-40dc-b803-56b790aca5d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 581.692084] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:92:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0636c3f6-fcb7-4954-ab07-c5cd0dee37b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc45bc62-e82d-40dc-b803-56b790aca5d4', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 581.700551] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Creating folder: Project (75346deaf9ad40fa925d4aff9fdff2cc). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 581.700852] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08959b99-046d-40c6-af54-8bec2f418ff0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.717859] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Created folder: Project (75346deaf9ad40fa925d4aff9fdff2cc) in parent group-v496304. [ 581.719017] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Creating folder: Instances. Parent ref: group-v496343. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 581.719017] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67ac3f80-ef16-4d19-a79d-51adb03a0203 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.727547] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Created folder: Instances in parent group-v496343. [ 581.727793] env[62383]: DEBUG oslo.service.loopingcall [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 581.727977] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 581.728190] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a774d26-d04c-4b33-aea8-1372ab3dee80 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.751443] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 581.751443] env[62383]: value = "task-2451043" [ 581.751443] env[62383]: _type = "Task" [ 581.751443] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.761437] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451043, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.863553] env[62383]: DEBUG nova.compute.manager [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 581.999219] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451040, 'name': Rename_Task, 'duration_secs': 0.143379} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.002235] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 582.002770] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45ae6823-8292-456a-8e1d-5b52336909fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.010400] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 582.010400] env[62383]: value = "task-2451044" [ 582.010400] env[62383]: _type = "Task" [ 582.010400] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.022293] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451044, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.029378] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6d33f268-ce22-4c09-8d29-ad8586eb7611 tempest-ServerDiagnosticsNegativeTest-1368891084 tempest-ServerDiagnosticsNegativeTest-1368891084-project-member] Lock "ab338058-13c8-4df9-ba55-fabe1952557d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.581s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.204307] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 582.204584] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.204747] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 582.204938] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.205076] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 582.205226] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 582.205440] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 582.205597] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 582.205757] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 582.206258] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 582.206258] env[62383]: DEBUG nova.virt.hardware [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 582.207596] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea52512-4c52-4aee-92ce-7b89ca10483c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.217923] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6440a918-0ed4-45c4-b0b9-1364ab6fc9d9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.236334] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:85:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb971244-43ba-41b4-a6a2-a4558548012c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1a6d806-7723-4d98-843f-fe34d9a9f94c', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 582.244959] env[62383]: DEBUG oslo.service.loopingcall [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 582.246692] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 582.246962] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-424f98ba-a23f-4102-9a95-cbe1457f6666 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.275096] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451043, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.277075] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 582.277075] env[62383]: value = "task-2451045" [ 582.277075] env[62383]: _type = "Task" [ 582.277075] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.289891] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451045, 'name': CreateVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.327876] env[62383]: DEBUG nova.network.neutron [-] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.393640] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.523806] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451044, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.714650] env[62383]: DEBUG nova.network.neutron [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Successfully updated port: f072e9db-418e-4a2d-a8a0-3d6d74444bb7 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 582.779525] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451043, 'name': CreateVM_Task, 'duration_secs': 0.532366} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.784191] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 582.784715] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.784893] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.785457] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 582.786756] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-971cb5f0-ba4c-4907-8517-3c1b94ad6919 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.795176] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451045, 'name': CreateVM_Task, 'duration_secs': 0.453977} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.795927] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 582.796778] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.798749] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 582.798749] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520fd3a3-eb64-664c-02e7-075917fd79cd" [ 582.798749] env[62383]: _type = "Task" [ 582.798749] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.806076] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520fd3a3-eb64-664c-02e7-075917fd79cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.830700] env[62383]: INFO nova.compute.manager [-] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Took 1.35 seconds to deallocate network for instance. [ 583.038993] env[62383]: DEBUG oslo_vmware.api [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451044, 'name': PowerOnVM_Task, 'duration_secs': 0.515093} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.043693] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 583.044030] env[62383]: INFO nova.compute.manager [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Took 10.35 seconds to spawn the instance on the hypervisor. 
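The PowerOnVM_Task sequence just above (task invoked, "Waiting for the task", "_poll_task ... progress", "completed successfully", "Powered on the VM") follows oslo.vmware's invoke-then-poll pattern. A minimal sketch of that pattern, independent of Nova; the vCenter host, credentials, and the way vm_ref is obtained are placeholders, not values from this log:

# Sketch of the invoke-then-poll pattern behind the PowerOnVM_Task lines above.
# Host, credentials and the origin of vm_ref are assumptions for illustration.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.org',               # vCenter host (placeholder)
    'administrator@vsphere.local',  # user (placeholder)
    'secret',                       # password (placeholder)
    api_retry_count=10,             # retry transient vCenter faults
    task_poll_interval=0.5)         # seconds between task progress polls

def power_on(vm_ref):
    """Power on a VM given its managed-object reference."""
    # Returns a Task moref immediately; the operation runs server-side.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Blocks while polling the task (the "_poll_task ... progress is N%"
    # lines above) and raises if the task ends in error.
    session.wait_for_task(task)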
[ 583.045051] env[62383]: DEBUG nova.compute.manager [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 583.051533] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d590a45-fd27-4cfe-a237-bb3faa494abe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.067948] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67efa795-0d77-45e5-9230-567ea32b8efc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.075611] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149dcb7a-0e7d-4890-acc4-3a634a120e6c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.114256] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9aa1177-63a2-462f-b57d-4fc290a94aea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.123179] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068f867f-f563-4443-b4c4-c3160a223b99 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.138762] env[62383]: DEBUG nova.compute.provider_tree [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.202840] env[62383]: DEBUG nova.compute.manager [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Received event network-vif-plugged-cc45bc62-e82d-40dc-b803-56b790aca5d4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 583.202840] env[62383]: DEBUG oslo_concurrency.lockutils [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] Acquiring lock "67d41910-54e1-48f1-b0d3-f34a62595ef2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.204771] env[62383]: DEBUG oslo_concurrency.lockutils [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] Lock "67d41910-54e1-48f1-b0d3-f34a62595ef2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.205116] env[62383]: DEBUG oslo_concurrency.lockutils [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] Lock "67d41910-54e1-48f1-b0d3-f34a62595ef2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
:: held 0.002s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.205335] env[62383]: DEBUG nova.compute.manager [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] No waiting events found dispatching network-vif-plugged-cc45bc62-e82d-40dc-b803-56b790aca5d4 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 583.205550] env[62383]: WARNING nova.compute.manager [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Received unexpected event network-vif-plugged-cc45bc62-e82d-40dc-b803-56b790aca5d4 for instance with vm_state building and task_state spawning. [ 583.205747] env[62383]: DEBUG nova.compute.manager [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Received event network-changed-cc45bc62-e82d-40dc-b803-56b790aca5d4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 583.205933] env[62383]: DEBUG nova.compute.manager [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Refreshing instance network info cache due to event network-changed-cc45bc62-e82d-40dc-b803-56b790aca5d4. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 583.206874] env[62383]: DEBUG oslo_concurrency.lockutils [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] Acquiring lock "refresh_cache-67d41910-54e1-48f1-b0d3-f34a62595ef2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.207438] env[62383]: DEBUG oslo_concurrency.lockutils [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] Acquired lock "refresh_cache-67d41910-54e1-48f1-b0d3-f34a62595ef2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.207653] env[62383]: DEBUG nova.network.neutron [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Refreshing network info cache for port cc45bc62-e82d-40dc-b803-56b790aca5d4 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 583.219689] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquiring lock "refresh_cache-eedadcc7-d02e-4a21-a43a-1dccde81b3b4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.219689] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquired lock "refresh_cache-eedadcc7-d02e-4a21-a43a-1dccde81b3b4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.219689] env[62383]: DEBUG nova.network.neutron [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] 
[instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 583.310926] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520fd3a3-eb64-664c-02e7-075917fd79cd, 'name': SearchDatastore_Task, 'duration_secs': 0.013563} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.311282] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.311401] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 583.312108] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.312309] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.312490] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 583.312874] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.313451] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 583.313451] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36fec093-a6f7-4917-94ed-73cfc704c206 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.315663] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d775bdd4-a3aa-411d-9c60-237d0003ce85 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.321062] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 583.321062] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5292d3c1-99da-b7a2-db7a-1a97354a97af" [ 583.321062] env[62383]: _type = "Task" [ 583.321062] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.327611] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 583.327796] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 583.329164] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6d33dce-e604-4ad0-bac5-04ed85600c19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.334472] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5292d3c1-99da-b7a2-db7a-1a97354a97af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.338202] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 583.338202] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5255a3df-0218-e027-f7fb-62dda9e16b70" [ 583.338202] env[62383]: _type = "Task" [ 583.338202] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.342354] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.347745] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5255a3df-0218-e027-f7fb-62dda9e16b70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.580101] env[62383]: INFO nova.compute.manager [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Took 27.06 seconds to build instance. [ 583.642107] env[62383]: DEBUG nova.scheduler.client.report [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 583.838301] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5292d3c1-99da-b7a2-db7a-1a97354a97af, 'name': SearchDatastore_Task, 'duration_secs': 0.020786} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.842172] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.842172] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 583.842172] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.859150] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5255a3df-0218-e027-f7fb-62dda9e16b70, 'name': SearchDatastore_Task, 'duration_secs': 0.011007} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.859429] env[62383]: DEBUG nova.network.neutron [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 583.864273] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d646e666-ef4b-4323-9f9b-557ef018aa68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.874738] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 583.874738] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c8a403-a37a-90ab-5177-59d0f725d680" [ 583.874738] env[62383]: _type = "Task" [ 583.874738] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.886685] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c8a403-a37a-90ab-5177-59d0f725d680, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.910912] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "330b5e35-3292-4df7-b288-547b158e671a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.911263] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "330b5e35-3292-4df7-b288-547b158e671a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.959406] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.959655] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.020990] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "872ac212-9f29-426d-94c7-e1bf73aebd94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.021111] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "872ac212-9f29-426d-94c7-e1bf73aebd94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.082320] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99002965-ed71-4bad-99cf-8e043a0573d2 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.586s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.152180] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.658s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.152371] env[62383]: DEBUG nova.compute.manager [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 584.155635] env[62383]: DEBUG oslo_concurrency.lockutils [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.535s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.155918] env[62383]: DEBUG nova.objects.instance [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lazy-loading 'resources' on Instance uuid 8a2b209c-423c-446c-a769-f7d7820d46da {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 584.389435] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c8a403-a37a-90ab-5177-59d0f725d680, 'name': SearchDatastore_Task, 'duration_secs': 0.024062} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.389435] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.389820] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 67d41910-54e1-48f1-b0d3-f34a62595ef2/67d41910-54e1-48f1-b0d3-f34a62595ef2.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 584.390158] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.390804] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 584.391013] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f39efc16-e13e-4c36-a8ee-4102c0ebb64f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.393941] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-363e0e80-ffdd-4cdf-bd5a-a9508744885f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.401368] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 584.401368] env[62383]: value = "task-2451046" [ 584.401368] env[62383]: _type = "Task" [ 584.401368] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.405653] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 584.405780] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 584.407753] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7f3bf48-6776-4653-be1d-38f47cebc474 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.414885] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451046, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.417047] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 584.417047] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]523d4903-b5cc-2565-0e25-b3981e304487" [ 584.417047] env[62383]: _type = "Task" [ 584.417047] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.427411] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523d4903-b5cc-2565-0e25-b3981e304487, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.550176] env[62383]: DEBUG nova.network.neutron [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Updating instance_info_cache with network_info: [{"id": "f072e9db-418e-4a2d-a8a0-3d6d74444bb7", "address": "fa:16:3e:80:ce:c0", "network": {"id": "a4932c81-dfb9-4bfb-9aec-eac938aba18d", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1238856838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "189ee1f3f4c8461baa6dcc713b5f36f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf072e9db-41", "ovs_interfaceid": "f072e9db-418e-4a2d-a8a0-3d6d74444bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.586176] env[62383]: DEBUG nova.compute.manager [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] 
Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 584.660387] env[62383]: DEBUG nova.compute.utils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 584.662432] env[62383]: DEBUG nova.compute.manager [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 584.662432] env[62383]: DEBUG nova.network.neutron [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 584.916606] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451046, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.928867] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523d4903-b5cc-2565-0e25-b3981e304487, 'name': SearchDatastore_Task, 'duration_secs': 0.020098} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.930162] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08fd4f06-b2ce-4851-9e27-2420a0c2f22a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.941081] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 584.941081] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5200ec79-2860-309e-d5fb-a7d958cc3470" [ 584.941081] env[62383]: _type = "Task" [ 584.941081] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.950791] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5200ec79-2860-309e-d5fb-a7d958cc3470, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.975213] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2e976e-6b25-4b04-bc2d-eb184b01852a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.983781] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6dea44-1257-48f7-bab8-9053ab6207d2 tempest-ServersAdminNegativeTestJSON-752432111 tempest-ServersAdminNegativeTestJSON-752432111-project-admin] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Suspending the VM {{(pid=62383) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 584.984124] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-5d711c77-abfe-4840-91e7-bb4dd64edb18 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.992964] env[62383]: DEBUG oslo_vmware.api [None req-4a6dea44-1257-48f7-bab8-9053ab6207d2 tempest-ServersAdminNegativeTestJSON-752432111 tempest-ServersAdminNegativeTestJSON-752432111-project-admin] Waiting for the task: (returnval){ [ 584.992964] env[62383]: value = "task-2451047" [ 584.992964] env[62383]: _type = "Task" [ 584.992964] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.993786] env[62383]: DEBUG nova.network.neutron [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Updated VIF entry in instance network info cache for port cc45bc62-e82d-40dc-b803-56b790aca5d4. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 584.994124] env[62383]: DEBUG nova.network.neutron [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Updating instance_info_cache with network_info: [{"id": "cc45bc62-e82d-40dc-b803-56b790aca5d4", "address": "fa:16:3e:c5:92:47", "network": {"id": "15922417-5941-4372-b068-7f6b0a8c7335", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1040646301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75346deaf9ad40fa925d4aff9fdff2cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc45bc62-e8", "ovs_interfaceid": "cc45bc62-e82d-40dc-b803-56b790aca5d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.004090] env[62383]: DEBUG oslo_vmware.api [None req-4a6dea44-1257-48f7-bab8-9053ab6207d2 tempest-ServersAdminNegativeTestJSON-752432111 
tempest-ServersAdminNegativeTestJSON-752432111-project-admin] Task: {'id': task-2451047, 'name': SuspendVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.025824] env[62383]: DEBUG nova.policy [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f811e2f3423e44d597363b1dc8fa5e2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '304a62370f8149049a797eb7077e910b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 585.052861] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Releasing lock "refresh_cache-eedadcc7-d02e-4a21-a43a-1dccde81b3b4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.053290] env[62383]: DEBUG nova.compute.manager [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Instance network_info: |[{"id": "f072e9db-418e-4a2d-a8a0-3d6d74444bb7", "address": "fa:16:3e:80:ce:c0", "network": {"id": "a4932c81-dfb9-4bfb-9aec-eac938aba18d", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1238856838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "189ee1f3f4c8461baa6dcc713b5f36f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf072e9db-41", "ovs_interfaceid": "f072e9db-418e-4a2d-a8a0-3d6d74444bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 585.054079] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:ce:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8a34aa30-95be-4b18-98ca-1f2d81f7e9e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f072e9db-418e-4a2d-a8a0-3d6d74444bb7', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} 
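
The paired lockutils messages threaded through this section (Lock "compute_resources" acquired by "...instance_claim" :: waited 19.535s, then "released" ... :: held 2.658s) are emitted by oslo.concurrency's named-lock wrapper, the inner function at lockutils.py:402/407/421 referenced in the records above. Below is a minimal sketch of that pattern, assuming an ordinary in-process lock called "compute_resources"; the function bodies are placeholders, not the resource tracker's real claim logic:

    from oslo_concurrency import lockutils

    # Decorator form: the wrapped call runs under the named lock, and
    # oslo.concurrency logs the 'acquired ... waited Ns' / '"released" ... held Ns'
    # pair around it, as seen in the records above.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # placeholder for claiming CPU/RAM/disk on this compute node
        return {'instance_uuid': instance_uuid, 'claimed': True}

    # Context-manager form for ad-hoc critical sections such as update_usage.
    def update_usage(instance_uuid):
        with lockutils.lock('compute_resources'):
            # placeholder for recalculating per-instance resource usage
            pass

That serialization is what the 19.535s wait on update_usage above reflects: the call queued behind other work holding the same lock name.
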
[ 585.063401] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Creating folder: Project (189ee1f3f4c8461baa6dcc713b5f36f4). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 585.064444] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbb3498b-87db-4c6c-828d-89845481c7d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.075233] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Created folder: Project (189ee1f3f4c8461baa6dcc713b5f36f4) in parent group-v496304. [ 585.075459] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Creating folder: Instances. Parent ref: group-v496347. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 585.076187] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c05d2e1b-a752-4db1-b852-fa438a19ad51 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.086039] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Created folder: Instances in parent group-v496347. [ 585.086039] env[62383]: DEBUG oslo.service.loopingcall [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 585.086039] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 585.086277] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aaf9a6ba-2ef8-4344-b05b-7860cfc5601d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.115502] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 585.115502] env[62383]: value = "task-2451050" [ 585.115502] env[62383]: _type = "Task" [ 585.115502] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.125630] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451050, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.132909] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.169235] env[62383]: DEBUG nova.compute.manager [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 585.302608] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c7b84a-b74a-4b16-998b-73e0719a4a7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.312243] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef2b283-661d-4a62-b436-4ccc5ff488c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.360103] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c938d2-27d9-42dc-ade2-435297c7fff0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.373666] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6623ff24-336d-4a90-970a-6fef61d06a7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.390627] env[62383]: DEBUG nova.compute.provider_tree [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.412990] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451046, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.452439] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5200ec79-2860-309e-d5fb-a7d958cc3470, 'name': SearchDatastore_Task, 'duration_secs': 0.069457} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.452753] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.455147] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a170fd95-3f7f-4315-a063-b9d02a7a1af4/a170fd95-3f7f-4315-a063-b9d02a7a1af4.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 585.455147] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41758ffe-ffad-4da8-8091-fb0376db3ade {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.461446] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 585.461446] env[62383]: value = "task-2451051" [ 585.461446] env[62383]: _type = "Task" [ 585.461446] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.473050] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451051, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.503226] env[62383]: DEBUG oslo_concurrency.lockutils [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] Releasing lock "refresh_cache-67d41910-54e1-48f1-b0d3-f34a62595ef2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.504037] env[62383]: DEBUG nova.compute.manager [req-4543749e-6c7f-4f86-934e-8d87edc40162 req-260801d0-3f62-4bf3-8985-1b5e9a3d62bf service nova] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Received event network-vif-deleted-28133d04-f592-4f43-9ade-58deef12e1f2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 585.509777] env[62383]: DEBUG oslo_vmware.api [None req-4a6dea44-1257-48f7-bab8-9053ab6207d2 tempest-ServersAdminNegativeTestJSON-752432111 tempest-ServersAdminNegativeTestJSON-752432111-project-admin] Task: {'id': task-2451047, 'name': SuspendVM_Task} progress is 54%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.626681] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451050, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.650191] env[62383]: DEBUG nova.compute.manager [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Received event network-vif-plugged-f072e9db-418e-4a2d-a8a0-3d6d74444bb7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 585.651526] env[62383]: DEBUG oslo_concurrency.lockutils [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] Acquiring lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.652052] env[62383]: DEBUG oslo_concurrency.lockutils [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] Lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.652812] env[62383]: DEBUG oslo_concurrency.lockutils [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] Lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.653757] env[62383]: DEBUG nova.compute.manager [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] No waiting events found dispatching network-vif-plugged-f072e9db-418e-4a2d-a8a0-3d6d74444bb7 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 585.653757] env[62383]: WARNING nova.compute.manager [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Received unexpected event network-vif-plugged-f072e9db-418e-4a2d-a8a0-3d6d74444bb7 for instance with vm_state building and task_state spawning. [ 585.653876] env[62383]: DEBUG nova.compute.manager [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Received event network-changed-f072e9db-418e-4a2d-a8a0-3d6d74444bb7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 585.654112] env[62383]: DEBUG nova.compute.manager [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Refreshing instance network info cache due to event network-changed-f072e9db-418e-4a2d-a8a0-3d6d74444bb7. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 585.654382] env[62383]: DEBUG oslo_concurrency.lockutils [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] Acquiring lock "refresh_cache-eedadcc7-d02e-4a21-a43a-1dccde81b3b4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.654665] env[62383]: DEBUG oslo_concurrency.lockutils [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] Acquired lock "refresh_cache-eedadcc7-d02e-4a21-a43a-1dccde81b3b4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.654894] env[62383]: DEBUG nova.network.neutron [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Refreshing network info cache for port f072e9db-418e-4a2d-a8a0-3d6d74444bb7 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 585.894808] env[62383]: DEBUG nova.scheduler.client.report [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 585.913797] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451046, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.012979} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.914982] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 67d41910-54e1-48f1-b0d3-f34a62595ef2/67d41910-54e1-48f1-b0d3-f34a62595ef2.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 585.914982] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 585.917833] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39624dc3-51aa-46ff-a709-1d637c9961ee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.926267] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 585.926267] env[62383]: value = "task-2451052" [ 585.926267] env[62383]: _type = "Task" [ 585.926267] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.943643] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451052, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.975445] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451051, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.004715] env[62383]: DEBUG oslo_vmware.api [None req-4a6dea44-1257-48f7-bab8-9053ab6207d2 tempest-ServersAdminNegativeTestJSON-752432111 tempest-ServersAdminNegativeTestJSON-752432111-project-admin] Task: {'id': task-2451047, 'name': SuspendVM_Task} progress is 54%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.134038] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451050, 'name': CreateVM_Task, 'duration_secs': 0.6029} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.134038] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 586.135686] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.135686] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.135686] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 586.136239] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8dd5d6ab-a18c-4d81-abe2-49ec8b3a5b93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.141945] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for the task: (returnval){ [ 586.141945] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52692558-2ba6-98ee-813b-a9f06d8b997a" [ 586.141945] env[62383]: _type = "Task" [ 586.141945] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.163456] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52692558-2ba6-98ee-813b-a9f06d8b997a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.184385] env[62383]: DEBUG nova.compute.manager [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 586.219904] env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 586.220159] env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 586.220317] env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 586.220549] env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 586.220635] env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 586.220841] env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 586.221457] env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 586.221589] env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 586.221816] 
env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 586.221993] env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 586.222239] env[62383]: DEBUG nova.virt.hardware [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 586.223181] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86dc8dcd-ce8b-4d59-8603-7292fc613826 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.233302] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ee2317-542e-418a-90dd-b2d119e37568 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.342160] env[62383]: DEBUG nova.network.neutron [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Successfully created port: f6691e8a-b0ad-4db1-b2f2-c313f3c50a51 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 586.402601] env[62383]: DEBUG oslo_concurrency.lockutils [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.247s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.409738] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 21.631s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.441736] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451052, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.215288} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.443809] env[62383]: INFO nova.scheduler.client.report [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Deleted allocations for instance 8a2b209c-423c-446c-a769-f7d7820d46da [ 586.443809] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 586.444761] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f791602-44f5-4b39-b305-5627579a5cd8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.476742] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 67d41910-54e1-48f1-b0d3-f34a62595ef2/67d41910-54e1-48f1-b0d3-f34a62595ef2.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 586.480885] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac36cddd-236e-422c-a701-620159f8c903 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.505175] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451051, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689469} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.509350] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a170fd95-3f7f-4315-a063-b9d02a7a1af4/a170fd95-3f7f-4315-a063-b9d02a7a1af4.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 586.509883] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 586.510374] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 586.510374] env[62383]: value = "task-2451053" [ 586.510374] env[62383]: _type = "Task" [ 586.510374] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.513628] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fec5481b-4ed7-4c19-816d-17008784dcb7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.516473] env[62383]: DEBUG oslo_vmware.api [None req-4a6dea44-1257-48f7-bab8-9053ab6207d2 tempest-ServersAdminNegativeTestJSON-752432111 tempest-ServersAdminNegativeTestJSON-752432111-project-admin] Task: {'id': task-2451047, 'name': SuspendVM_Task, 'duration_secs': 1.19924} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.520052] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6dea44-1257-48f7-bab8-9053ab6207d2 tempest-ServersAdminNegativeTestJSON-752432111 tempest-ServersAdminNegativeTestJSON-752432111-project-admin] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Suspended the VM {{(pid=62383) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 586.520312] env[62383]: DEBUG nova.compute.manager [None req-4a6dea44-1257-48f7-bab8-9053ab6207d2 tempest-ServersAdminNegativeTestJSON-752432111 tempest-ServersAdminNegativeTestJSON-752432111-project-admin] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 586.522389] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c228c42-22c4-426a-a1f3-97b5fed52441 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.526310] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 586.526310] env[62383]: value = "task-2451054" [ 586.526310] env[62383]: _type = "Task" [ 586.526310] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.537172] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451053, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.542016] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451054, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.650906] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52692558-2ba6-98ee-813b-a9f06d8b997a, 'name': SearchDatastore_Task, 'duration_secs': 0.011206} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.651225] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.651457] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 586.651690] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.651835] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.652016] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 586.652280] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-779b22f4-fc2f-40d7-97b2-711ed4a3e244 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.662110] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 586.662110] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 586.662110] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87a126fe-8b20-4578-8de7-2ce158ed00d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.668582] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for the task: (returnval){ [ 586.668582] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5272f0d1-04cc-1f25-16cc-811102699def" [ 586.668582] env[62383]: _type = "Task" [ 586.668582] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.675375] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5272f0d1-04cc-1f25-16cc-811102699def, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.916721] env[62383]: INFO nova.compute.claims [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.956827] env[62383]: DEBUG oslo_concurrency.lockutils [None req-73907f8d-bed8-45e7-b53e-45b8712e0183 tempest-ServerDiagnosticsTest-1419269817 tempest-ServerDiagnosticsTest-1419269817-project-member] Lock "8a2b209c-423c-446c-a769-f7d7820d46da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.206s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.998528] env[62383]: DEBUG nova.network.neutron [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Updated VIF entry in instance network info cache for port f072e9db-418e-4a2d-a8a0-3d6d74444bb7. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 586.998891] env[62383]: DEBUG nova.network.neutron [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Updating instance_info_cache with network_info: [{"id": "f072e9db-418e-4a2d-a8a0-3d6d74444bb7", "address": "fa:16:3e:80:ce:c0", "network": {"id": "a4932c81-dfb9-4bfb-9aec-eac938aba18d", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1238856838-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "189ee1f3f4c8461baa6dcc713b5f36f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8a34aa30-95be-4b18-98ca-1f2d81f7e9e6", "external-id": "nsx-vlan-transportzone-234", "segmentation_id": 234, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf072e9db-41", "ovs_interfaceid": "f072e9db-418e-4a2d-a8a0-3d6d74444bb7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.027655] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451053, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.036306] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070289} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.036787] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 587.037667] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c326d45-8b72-467b-978c-428e6fc25c65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.072567] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] a170fd95-3f7f-4315-a063-b9d02a7a1af4/a170fd95-3f7f-4315-a063-b9d02a7a1af4.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 587.073098] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a91fecc-db5f-4c71-b39f-92e6afd63e7e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.100040] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 587.100040] env[62383]: value = "task-2451055" [ 587.100040] env[62383]: _type = "Task" [ 587.100040] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.111045] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451055, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.181358] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5272f0d1-04cc-1f25-16cc-811102699def, 'name': SearchDatastore_Task, 'duration_secs': 0.008799} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.182413] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-614c8cfa-0793-4062-b048-578ab1f1a861 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.190311] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for the task: (returnval){ [ 587.190311] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52fb8128-632f-ee8b-60ae-0315c73cac54" [ 587.190311] env[62383]: _type = "Task" [ 587.190311] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.200743] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fb8128-632f-ee8b-60ae-0315c73cac54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.427470] env[62383]: INFO nova.compute.resource_tracker [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating resource usage from migration e6999db4-45d8-4db8-bb73-e60e42a7ccfb [ 587.508126] env[62383]: DEBUG oslo_concurrency.lockutils [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] Releasing lock "refresh_cache-eedadcc7-d02e-4a21-a43a-1dccde81b3b4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.508500] env[62383]: DEBUG nova.compute.manager [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Received event network-changed-940c815a-1c07-492f-8b17-e4a57d123790 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 587.508852] env[62383]: DEBUG nova.compute.manager [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Refreshing instance network info cache due to event network-changed-940c815a-1c07-492f-8b17-e4a57d123790. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 587.509031] env[62383]: DEBUG oslo_concurrency.lockutils [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] Acquiring lock "refresh_cache-14bb9b79-d224-4a64-861e-30dd919c5741" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.509190] env[62383]: DEBUG oslo_concurrency.lockutils [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] Acquired lock "refresh_cache-14bb9b79-d224-4a64-861e-30dd919c5741" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.509410] env[62383]: DEBUG nova.network.neutron [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Refreshing network info cache for port 940c815a-1c07-492f-8b17-e4a57d123790 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 587.527783] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451053, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.614278] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451055, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.706225] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fb8128-632f-ee8b-60ae-0315c73cac54, 'name': SearchDatastore_Task, 'duration_secs': 0.016211} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.709556] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.709556] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] eedadcc7-d02e-4a21-a43a-1dccde81b3b4/eedadcc7-d02e-4a21-a43a-1dccde81b3b4.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 587.710027] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e124d58-a5e3-4433-8acd-138d6a85d56c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.716144] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for the task: (returnval){ [ 587.716144] env[62383]: value = "task-2451056" [ 587.716144] env[62383]: _type = "Task" [ 587.716144] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.727266] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451056, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.925924] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355254b5-ca93-4bb9-b0f3-80c65b5c24af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.934467] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4640a56b-2928-45ca-a6dc-08de0455d6d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.974466] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201a304a-4841-4fbb-95c7-bc8a2683e7d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.985845] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49c0f1c-b43d-4382-9caa-24c57d5868c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.004562] env[62383]: DEBUG nova.compute.provider_tree [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.028119] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451053, 'name': ReconfigVM_Task, 'duration_secs': 1.069052} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.028547] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 67d41910-54e1-48f1-b0d3-f34a62595ef2/67d41910-54e1-48f1-b0d3-f34a62595ef2.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 588.029264] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0557d005-fc53-4e96-bffd-c4855468d81e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.037429] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 588.037429] env[62383]: value = "task-2451057" [ 588.037429] env[62383]: _type = "Task" [ 588.037429] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.050167] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451057, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.115684] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451055, 'name': ReconfigVM_Task, 'duration_secs': 0.648755} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.115992] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Reconfigured VM instance instance-0000000b to attach disk [datastore2] a170fd95-3f7f-4315-a063-b9d02a7a1af4/a170fd95-3f7f-4315-a063-b9d02a7a1af4.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 588.116808] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f3cca00-a936-40fe-857e-22a8a2bc584d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.126094] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 588.126094] env[62383]: value = "task-2451058" [ 588.126094] env[62383]: _type = "Task" [ 588.126094] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.135598] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451058, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.229501] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451056, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.508613] env[62383]: DEBUG nova.scheduler.client.report [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 588.548099] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451057, 'name': Rename_Task, 'duration_secs': 0.291818} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.548553] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 588.548739] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d89bea6-a20f-4568-8c83-1c1f1287ebd4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.555013] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 588.555013] env[62383]: value = "task-2451059" [ 588.555013] env[62383]: _type = "Task" [ 588.555013] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.563349] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451059, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.636986] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451058, 'name': Rename_Task, 'duration_secs': 0.242225} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.637539] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 588.637874] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98078de3-41ae-4423-bab9-af0211687edc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.644297] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 588.644297] env[62383]: value = "task-2451060" [ 588.644297] env[62383]: _type = "Task" [ 588.644297] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.652652] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451060, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.727606] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451056, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570042} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.727606] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] eedadcc7-d02e-4a21-a43a-1dccde81b3b4/eedadcc7-d02e-4a21-a43a-1dccde81b3b4.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 588.727606] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 588.727606] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e905616-38a6-4b2e-a772-dc12a5c98795 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.735402] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for the task: (returnval){ [ 588.735402] env[62383]: value = "task-2451061" [ 588.735402] env[62383]: _type = "Task" [ 588.735402] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.743726] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451061, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.898830] env[62383]: DEBUG nova.network.neutron [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Updated VIF entry in instance network info cache for port 940c815a-1c07-492f-8b17-e4a57d123790. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 588.899105] env[62383]: DEBUG nova.network.neutron [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Updating instance_info_cache with network_info: [{"id": "940c815a-1c07-492f-8b17-e4a57d123790", "address": "fa:16:3e:9a:67:f3", "network": {"id": "3f07f890-19a6-41bf-9948-3601561d0dae", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-243075880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9e3f9cc0f33c4f6b9bee57e176d9048e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap940c815a-1c", "ovs_interfaceid": "940c815a-1c07-492f-8b17-e4a57d123790", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.016332] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.605s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.016332] env[62383]: INFO nova.compute.manager [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Migrating [ 589.016725] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.016725] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.018104] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.035s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.019705] env[62383]: INFO nova.compute.claims [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f 
tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.026787] env[62383]: INFO nova.compute.rpcapi [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 589.027404] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.071216] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451059, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.156338] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451060, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.250201] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451061, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067321} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.250907] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 589.252179] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1b0a70-daa2-4115-80b8-c505ce447be0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.286805] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] eedadcc7-d02e-4a21-a43a-1dccde81b3b4/eedadcc7-d02e-4a21-a43a-1dccde81b3b4.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 589.287835] env[62383]: DEBUG nova.network.neutron [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Successfully updated port: f6691e8a-b0ad-4db1-b2f2-c313f3c50a51 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 589.289036] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fee5bc50-1d17-4c91-8113-5235766caa64 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.308516] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "refresh_cache-80821717-f961-49c7-8b79-c152edfdfb94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.308516] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquired lock "refresh_cache-80821717-f961-49c7-8b79-c152edfdfb94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.308516] env[62383]: DEBUG nova.network.neutron [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 589.316328] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for the task: (returnval){ [ 589.316328] env[62383]: value = "task-2451062" [ 589.316328] env[62383]: _type = "Task" [ 589.316328] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.326922] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451062, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.402829] env[62383]: DEBUG oslo_concurrency.lockutils [req-3af8c046-2cba-4953-8019-8fe6c5c6c304 req-ac479a32-f0f0-4e6b-824d-c98ab3380209 service nova] Releasing lock "refresh_cache-14bb9b79-d224-4a64-861e-30dd919c5741" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.552476] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.552837] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.553155] env[62383]: DEBUG nova.network.neutron [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 589.577803] env[62383]: DEBUG oslo_vmware.api [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451059, 'name': PowerOnVM_Task, 'duration_secs': 0.540685} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.578128] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 589.578294] env[62383]: INFO nova.compute.manager [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Took 10.97 seconds to spawn the instance on the hypervisor. 
[ 589.578955] env[62383]: DEBUG nova.compute.manager [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 589.582019] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ceb64b-3fe6-47a1-97e8-73143c16ee61 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.621563] env[62383]: DEBUG nova.compute.manager [req-2d60dff1-ef6a-44b1-8945-4c6d3bbe1c36 req-935a0dd0-a810-4b68-b060-ab979604fdff service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Received event network-vif-plugged-f6691e8a-b0ad-4db1-b2f2-c313f3c50a51 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 589.621780] env[62383]: DEBUG oslo_concurrency.lockutils [req-2d60dff1-ef6a-44b1-8945-4c6d3bbe1c36 req-935a0dd0-a810-4b68-b060-ab979604fdff service nova] Acquiring lock "80821717-f961-49c7-8b79-c152edfdfb94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.621983] env[62383]: DEBUG oslo_concurrency.lockutils [req-2d60dff1-ef6a-44b1-8945-4c6d3bbe1c36 req-935a0dd0-a810-4b68-b060-ab979604fdff service nova] Lock "80821717-f961-49c7-8b79-c152edfdfb94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.622282] env[62383]: DEBUG oslo_concurrency.lockutils [req-2d60dff1-ef6a-44b1-8945-4c6d3bbe1c36 req-935a0dd0-a810-4b68-b060-ab979604fdff service nova] Lock "80821717-f961-49c7-8b79-c152edfdfb94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.622636] env[62383]: DEBUG nova.compute.manager [req-2d60dff1-ef6a-44b1-8945-4c6d3bbe1c36 req-935a0dd0-a810-4b68-b060-ab979604fdff service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] No waiting events found dispatching network-vif-plugged-f6691e8a-b0ad-4db1-b2f2-c313f3c50a51 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 589.622636] env[62383]: WARNING nova.compute.manager [req-2d60dff1-ef6a-44b1-8945-4c6d3bbe1c36 req-935a0dd0-a810-4b68-b060-ab979604fdff service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Received unexpected event network-vif-plugged-f6691e8a-b0ad-4db1-b2f2-c313f3c50a51 for instance with vm_state building and task_state spawning. [ 589.660742] env[62383]: DEBUG oslo_vmware.api [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451060, 'name': PowerOnVM_Task, 'duration_secs': 0.838385} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.661042] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 589.661246] env[62383]: DEBUG nova.compute.manager [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 589.662678] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea193e2e-74f0-4953-9529-4ee2c2dd0648 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.826999] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451062, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.913172] env[62383]: DEBUG nova.network.neutron [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 590.105810] env[62383]: INFO nova.compute.manager [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Took 29.34 seconds to build instance. [ 590.180674] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.328785] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451062, 'name': ReconfigVM_Task, 'duration_secs': 0.727381} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.329088] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Reconfigured VM instance instance-00000010 to attach disk [datastore2] eedadcc7-d02e-4a21-a43a-1dccde81b3b4/eedadcc7-d02e-4a21-a43a-1dccde81b3b4.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 590.329690] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97f3f81d-7a79-4a4a-b78f-e5b8a55a4b3c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.342254] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for the task: (returnval){ [ 590.342254] env[62383]: value = "task-2451063" [ 590.342254] env[62383]: _type = "Task" [ 590.342254] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.354865] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451063, 'name': Rename_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.610339] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5200c8e1-5629-4e28-9924-345d724923c3 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "67d41910-54e1-48f1-b0d3-f34a62595ef2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.490s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.644946] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee22d3e-f7ed-407d-b671-5a4efbc1b95e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.654055] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64464e28-2ac4-4cd3-86b2-f8e66354a6c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.688261] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17bb8253-bccf-464e-ad78-558fcbcb6436 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.697043] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1736162c-a2b5-4efc-9959-50f4ffe1b4dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.714366] env[62383]: DEBUG nova.compute.provider_tree [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Inventory has not changed in ProviderTree for 
provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.857328] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451063, 'name': Rename_Task, 'duration_secs': 0.252758} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.857619] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 590.858765] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-714d258b-9b2c-48e0-8f2d-d868254d0109 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.869189] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for the task: (returnval){ [ 590.869189] env[62383]: value = "task-2451064" [ 590.869189] env[62383]: _type = "Task" [ 590.869189] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.877734] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451064, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.882856] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.882856] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.118814] env[62383]: DEBUG nova.compute.manager [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 591.153150] env[62383]: DEBUG nova.network.neutron [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Updating instance_info_cache with network_info: [{"id": "f6691e8a-b0ad-4db1-b2f2-c313f3c50a51", "address": "fa:16:3e:e3:87:1e", "network": {"id": "86f13b93-2f69-4ec7-b838-5d8cb11a1051", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1138221332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "304a62370f8149049a797eb7077e910b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6691e8a-b0", "ovs_interfaceid": "f6691e8a-b0ad-4db1-b2f2-c313f3c50a51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.218325] env[62383]: DEBUG nova.scheduler.client.report [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 591.379963] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451064, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.401244] env[62383]: DEBUG nova.network.neutron [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance_info_cache with network_info: [{"id": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "address": "fa:16:3e:21:59:e3", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5205d6ef-09", "ovs_interfaceid": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.656811] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Releasing lock "refresh_cache-80821717-f961-49c7-8b79-c152edfdfb94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.656811] env[62383]: DEBUG nova.compute.manager [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Instance network_info: |[{"id": "f6691e8a-b0ad-4db1-b2f2-c313f3c50a51", "address": "fa:16:3e:e3:87:1e", "network": {"id": "86f13b93-2f69-4ec7-b838-5d8cb11a1051", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1138221332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "304a62370f8149049a797eb7077e910b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6691e8a-b0", "ovs_interfaceid": "f6691e8a-b0ad-4db1-b2f2-c313f3c50a51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
591.657205] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:87:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1bf71001-973b-4fda-b804-ee6abcd12776', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6691e8a-b0ad-4db1-b2f2-c313f3c50a51', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 591.667084] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Creating folder: Project (304a62370f8149049a797eb7077e910b). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 591.667270] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.667552] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fa8726a-e62d-4bea-9a69-ecc1bb4a243a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.682388] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Created folder: Project (304a62370f8149049a797eb7077e910b) in parent group-v496304. [ 591.682388] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Creating folder: Instances. Parent ref: group-v496350. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 591.682633] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-043228fe-b78f-4029-9d82-a66fb97ef26d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.694144] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Created folder: Instances in parent group-v496350. [ 591.694144] env[62383]: DEBUG oslo.service.loopingcall [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 591.694144] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 591.694144] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c22dc562-4f18-4874-ac8e-ea7cb3c21476 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.722060] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 591.722060] env[62383]: value = "task-2451067" [ 591.722060] env[62383]: _type = "Task" [ 591.722060] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.731341] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.731651] env[62383]: DEBUG nova.compute.manager [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 591.734518] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.536s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.734744] env[62383]: DEBUG nova.objects.instance [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lazy-loading 'resources' on Instance uuid 9659a2dd-f1da-4a8e-a740-1ec01f96940c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 591.741997] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451067, 'name': CreateVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.887351] env[62383]: DEBUG oslo_vmware.api [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451064, 'name': PowerOnVM_Task, 'duration_secs': 0.888593} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.887351] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 591.887351] env[62383]: INFO nova.compute.manager [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Took 10.66 seconds to spawn the instance on the hypervisor. [ 591.887351] env[62383]: DEBUG nova.compute.manager [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 591.889960] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6ef107-7a54-47c4-91c6-c71caf0512f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.904477] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.239661] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451067, 'name': CreateVM_Task, 'duration_secs': 0.362019} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.243487] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 592.245178] env[62383]: DEBUG nova.compute.utils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 592.250178] env[62383]: DEBUG oslo_vmware.service [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb89a891-8a15-4750-952b-5a88fc89fcaa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.254019] env[62383]: DEBUG nova.compute.manager [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Not allocating networking since 'none' was specified. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 592.256688] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.256688] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.256789] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 592.258787] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e5e2c78-a213-4236-84b3-178a70f23756 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.268103] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 592.268103] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]523949ed-1e02-d311-1f63-ed148b408b67" [ 592.268103] env[62383]: _type = "Task" [ 592.268103] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.280184] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523949ed-1e02-d311-1f63-ed148b408b67, 'name': SearchDatastore_Task} progress is 0%. 
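The "Invoking Folder.CreateVM_Task", "Waiting for the task: (returnval){...}" and "Task: {...} progress is N%" records above are the oslo.vmware request/poll cycle. A rough sketch of that cycle, assuming an already established VMwareAPISession; vm_folder, config_spec and res_pool are placeholders, not values from this log:

```python
from oslo_vmware import api as vmware_api

def create_vm(session: vmware_api.VMwareAPISession, vm_folder, config_spec, res_pool):
    # invoke_api() issues the SOAP call (the "Invoking ..." lines) and returns
    # a task reference such as "task-2451067".
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                              config=config_spec, pool=res_pool)
    # wait_for_task() polls the task (the "_poll_task ... progress is N%"
    # lines) until it reaches the success state, then returns the task info.
    task_info = session.wait_for_task(task)
    return task_info.result
```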
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.388315] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.388592] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 592.427848] env[62383]: INFO nova.compute.manager [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Took 30.33 seconds to build instance. [ 592.752631] env[62383]: DEBUG nova.compute.manager [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 592.783242] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.783515] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.783753] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.783903] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.784094] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 
tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.784354] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10610ced-eb20-4ea5-930d-9be1a4422fb3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.814032] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.814233] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 592.822338] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d36274-0071-4a1d-b91a-56e37e9dd9cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.830495] env[62383]: DEBUG nova.compute.manager [req-d25c1759-cf46-4cd3-b949-ca768d544cec req-00a4094f-6fb0-4a40-a9b3-9b6b1c032a7a service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Received event network-changed-f6691e8a-b0ad-4db1-b2f2-c313f3c50a51 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 592.830495] env[62383]: DEBUG nova.compute.manager [req-d25c1759-cf46-4cd3-b949-ca768d544cec req-00a4094f-6fb0-4a40-a9b3-9b6b1c032a7a service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Refreshing instance network info cache due to event network-changed-f6691e8a-b0ad-4db1-b2f2-c313f3c50a51. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 592.830495] env[62383]: DEBUG oslo_concurrency.lockutils [req-d25c1759-cf46-4cd3-b949-ca768d544cec req-00a4094f-6fb0-4a40-a9b3-9b6b1c032a7a service nova] Acquiring lock "refresh_cache-80821717-f961-49c7-8b79-c152edfdfb94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 592.830495] env[62383]: DEBUG oslo_concurrency.lockutils [req-d25c1759-cf46-4cd3-b949-ca768d544cec req-00a4094f-6fb0-4a40-a9b3-9b6b1c032a7a service nova] Acquired lock "refresh_cache-80821717-f961-49c7-8b79-c152edfdfb94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.830695] env[62383]: DEBUG nova.network.neutron [req-d25c1759-cf46-4cd3-b949-ca768d544cec req-00a4094f-6fb0-4a40-a9b3-9b6b1c032a7a service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Refreshing network info cache for port f6691e8a-b0ad-4db1-b2f2-c313f3c50a51 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 592.836893] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecc41b43-fd7a-4d4e-9e80-62ebe75cfc4b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.846679] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 592.846679] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522b64f7-e43b-1137-b717-ec6858b9d0b9" [ 592.846679] env[62383]: _type = "Task" [ 592.846679] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.864038] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Preparing fetch location {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 592.864355] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Creating directory with path [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.864820] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-caf3acf3-e1ed-4605-8596-5eaf02377886 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.888298] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Created directory with path [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.888566] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Fetch image to [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 592.888752] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Downloading image file data cac3b430-a1d5-4ad1-92ec-34c2261779a8 to [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk on the data store datastore1 {{(pid=62383) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 592.889621] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f9acca-161a-4aae-b58f-f8714bb42730 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.899294] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7396a1e-b698-43e7-9ed7-afdfabcfb9b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.912641] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6984baf9-8dc6-4464-a60c-cfa6c07938c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.953811] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cb97922b-652f-45ff-bd44-ca1bd4d313ca tempest-ServerPasswordTestJSON-1517103748 
tempest-ServerPasswordTestJSON-1517103748-project-member] Lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.880s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 592.958575] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2f1ca7-ac17-45bc-ac49-f1a1fc7e011d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.970198] env[62383]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ac61d8e8-bdfe-4fb2-8a98-c7439bf0c148 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.993781] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Downloading image file data cac3b430-a1d5-4ad1-92ec-34c2261779a8 to the data store datastore1 {{(pid=62383) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 592.997598] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7b58f2-d573-4669-a4b2-c03de9da6b94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.007390] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af78ec9-9062-439e-94bd-7c55517209a0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.053238] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edae469-3215-47f4-be2a-a47c0bc5160a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.061484] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a145e293-cf77-461e-a5d1-445dc1480e35 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.080708] env[62383]: DEBUG nova.compute.provider_tree [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.106580] env[62383]: DEBUG oslo_vmware.rw_handles [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62383) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 593.318818] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.318818] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.319351] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.319564] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.320234] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.323724] env[62383]: INFO nova.compute.manager [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Terminating instance [ 593.462034] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf02cea-d930-463e-8c4d-38d2536a1982 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.464892] env[62383]: DEBUG nova.compute.manager [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 593.488544] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance 'a8d56b8e-fa11-4844-ab65-a2e5d24b1e07' progress to 0 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 593.586312] env[62383]: DEBUG nova.scheduler.client.report [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 593.766493] env[62383]: DEBUG nova.compute.manager [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 593.805142] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 593.805406] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 593.805566] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 593.805741] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Flavor pref 0:0:0 {{(pid=62383) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 593.805906] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 593.806135] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 593.806422] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 593.806761] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 593.806761] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 593.806921] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 593.807159] env[62383]: DEBUG nova.virt.hardware [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 593.808062] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ba22a5-d280-41f6-ad67-890c6281cf37 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.820084] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da82cc3-b8e8-4f9e-92f0-7b2abbef9d55 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.836985] env[62383]: DEBUG nova.compute.manager [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Start destroying the instance on the hypervisor. 
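The inventory reported to Placement a few records above ('VCPU': total 48, allocation_ratio 4.0, and so on) translates into usable capacity as (total - reserved) * allocation_ratio. A quick check of those numbers, using the values copied from the log:

```python
# Inventory values as logged for provider 60615f54-0557-436e-a486-87505bffb4c7.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for resource_class, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(resource_class, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```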
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 593.837228] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 593.837668] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 593.844027] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Creating folder: Project (5eafa6cf7d6446a88bfa673b1f577255). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 593.850324] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd72b07-67ae-4445-bce6-5ae7f15722fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.853236] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4eee78aa-64d5-46d7-a4f9-5fae104ea892 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.859619] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 593.859844] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2e154c1-dd1e-4c82-996d-e824dee2c9c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.863766] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Created folder: Project (5eafa6cf7d6446a88bfa673b1f577255) in parent group-v496304. [ 593.863766] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Creating folder: Instances. Parent ref: group-v496353. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 593.863908] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44348fa8-96ec-4a46-bfe4-10887dda6365 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.866872] env[62383]: DEBUG oslo_vmware.api [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 593.866872] env[62383]: value = "task-2451069" [ 593.866872] env[62383]: _type = "Task" [ 593.866872] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.875182] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Created folder: Instances in parent group-v496353. [ 593.875182] env[62383]: DEBUG oslo.service.loopingcall [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 593.880404] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 593.880691] env[62383]: DEBUG oslo_vmware.api [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451069, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.881344] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7b0ab1d-0330-4313-a9f4-897a65650c35 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.895850] env[62383]: DEBUG oslo_vmware.rw_handles [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Completed reading data from the image iterator. {{(pid=62383) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 593.895850] env[62383]: DEBUG oslo_vmware.rw_handles [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 593.901660] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 593.901660] env[62383]: value = "task-2451071" [ 593.901660] env[62383]: _type = "Task" [ 593.901660] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.908935] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451071, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.932253] env[62383]: DEBUG nova.network.neutron [req-d25c1759-cf46-4cd3-b949-ca768d544cec req-00a4094f-6fb0-4a40-a9b3-9b6b1c032a7a service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Updated VIF entry in instance network info cache for port f6691e8a-b0ad-4db1-b2f2-c313f3c50a51. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 593.932663] env[62383]: DEBUG nova.network.neutron [req-d25c1759-cf46-4cd3-b949-ca768d544cec req-00a4094f-6fb0-4a40-a9b3-9b6b1c032a7a service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Updating instance_info_cache with network_info: [{"id": "f6691e8a-b0ad-4db1-b2f2-c313f3c50a51", "address": "fa:16:3e:e3:87:1e", "network": {"id": "86f13b93-2f69-4ec7-b838-5d8cb11a1051", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1138221332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "304a62370f8149049a797eb7077e910b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6691e8a-b0", "ovs_interfaceid": "f6691e8a-b0ad-4db1-b2f2-c313f3c50a51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.996550] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.999088] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 593.999088] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d6158cf-d1cc-4476-851b-7fb8ebd10d08 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.006492] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 594.006492] env[62383]: value = "task-2451072" [ 594.006492] env[62383]: _type = "Task" [ 594.006492] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.016618] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451072, 'name': PowerOffVM_Task} progress is 0%. 
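The instance_info_cache entry logged above is a list of VIF dictionaries. A small illustrative helper (not part of Nova) that pulls the fixed IPs out of that structure, run against a trimmed copy of the logged entry:

```python
# Trimmed copy of the VIF entry logged above, keeping only the fields used here.
network_info = [{
    "id": "f6691e8a-b0ad-4db1-b2f2-c313f3c50a51",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.9"}]}]},
}]

def fixed_ips(network_info):
    # Walk VIF -> network -> subnets -> ips and collect the addresses.
    return [ip["address"]
            for vif in network_info
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]]

print(fixed_ips(network_info))  # ['192.168.128.9']
```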
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.050407] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Downloaded image file data cac3b430-a1d5-4ad1-92ec-34c2261779a8 to vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk on the data store datastore1 {{(pid=62383) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 594.053969] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Caching image {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 594.054411] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Copying Virtual Disk [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk to [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 594.054884] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91fecf00-711e-4c37-8ff6-cca82ba3aee5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.063493] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 594.063493] env[62383]: value = "task-2451073" [ 594.063493] env[62383]: _type = "Task" [ 594.063493] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.076169] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451073, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.096344] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.360s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.097995] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.152s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.099770] env[62383]: INFO nova.compute.claims [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 594.135636] env[62383]: INFO nova.scheduler.client.report [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Deleted allocations for instance 9659a2dd-f1da-4a8e-a740-1ec01f96940c [ 594.379774] env[62383]: DEBUG oslo_vmware.api [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451069, 'name': PowerOffVM_Task, 'duration_secs': 0.28641} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.382508] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 594.383708] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 594.383708] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94aac99c-2258-48e3-996e-326356c7d203 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.417041] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451071, 'name': CreateVM_Task, 'duration_secs': 0.38247} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.417655] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 594.417655] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.417809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.418230] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 594.422450] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba8d474f-d058-4bdf-a6df-dcf0b5700f02 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.428824] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 594.428824] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526114cd-71da-5dbd-4846-0f6ff4427bd1" [ 594.428824] env[62383]: _type = "Task" [ 594.428824] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.436934] env[62383]: DEBUG oslo_concurrency.lockutils [req-d25c1759-cf46-4cd3-b949-ca768d544cec req-00a4094f-6fb0-4a40-a9b3-9b6b1c032a7a service nova] Releasing lock "refresh_cache-80821717-f961-49c7-8b79-c152edfdfb94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.451875] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.451875] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 594.451875] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.458201] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 594.458201] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 594.458201] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleting the datastore file [datastore2] a170fd95-3f7f-4315-a063-b9d02a7a1af4 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 594.458201] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-373aa05b-eca3-41ab-bd1a-2620a3624884 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.464997] env[62383]: DEBUG oslo_vmware.api [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 594.464997] env[62383]: value = "task-2451075" [ 594.464997] env[62383]: _type = "Task" [ 594.464997] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.471432] env[62383]: DEBUG oslo_vmware.api [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451075, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.519201] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451072, 'name': PowerOffVM_Task, 'duration_secs': 0.226273} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.520086] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 594.520086] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance 'a8d56b8e-fa11-4844-ab65-a2e5d24b1e07' progress to 17 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 594.584886] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451073, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.647703] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abdddd77-c17b-47d7-bfc6-dd6f7c0b969d tempest-ServerExternalEventsTest-1744659003 tempest-ServerExternalEventsTest-1744659003-project-member] Lock "9659a2dd-f1da-4a8e-a740-1ec01f96940c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.525s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.976810] env[62383]: DEBUG oslo_vmware.api [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335653} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.977187] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 594.977396] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 594.977595] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 594.977751] env[62383]: INFO nova.compute.manager [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Took 1.14 seconds to destroy the instance on the hypervisor. [ 594.978020] env[62383]: DEBUG oslo.service.loopingcall [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
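The terminate path for instance a170fd95-3f7f-4315-a063-b9d02a7a1af4 above runs PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task in that order. A condensed sketch of the same sequence through oslo.vmware, assuming a live session; vm_ref, file_manager, ds_path and datacenter_ref are placeholders, not values from this log:

```python
def destroy_vm(session, vm_ref, file_manager, ds_path, datacenter_ref):
    # Power the VM off and wait for the PowerOffVM_Task to finish.
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
    # UnregisterVM is not a task, so there is nothing to poll.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # Delete the instance directory from the datastore and wait for the task.
    session.wait_for_task(
        session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                           file_manager,
                           name=str(ds_path),
                           datacenter=datacenter_ref))
```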
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.978204] env[62383]: DEBUG nova.compute.manager [-] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 594.978300] env[62383]: DEBUG nova.network.neutron [-] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 595.029560] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 595.029560] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.030288] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 595.030288] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.030288] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 595.031485] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 595.032205] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 595.032575] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 
tempest-MigrationsAdminTest-41819132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 595.032923] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 595.032923] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 595.033106] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 595.043602] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50f20479-6814-4dc2-8e40-2d2c42444f40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.057705] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquiring lock "17498cb6-8b16-4a2e-96ae-c594966cee77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.058277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lock "17498cb6-8b16-4a2e-96ae-c594966cee77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.065151] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 595.065151] env[62383]: value = "task-2451076" [ 595.065151] env[62383]: _type = "Task" [ 595.065151] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.079065] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451076, 'name': ReconfigVM_Task} progress is 10%. 
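The nova.virt.hardware records above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") enumerate CPU topologies whose sockets x cores x threads product equals the flavor's vCPU count. A deliberately simplified model of that search, not Nova's actual implementation (which also applies image/flavor preferences and ordering):

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every (sockets, cores, threads) triple within the limits whose
    # product equals the requested vCPU count.
    topologies = []
    for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                           range(1, min(vcpus, max_cores) + 1),
                                           range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology in the log
```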
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.082676] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451073, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.780154} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.082932] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Copied Virtual Disk [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk to [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 595.083133] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Deleting the datastore file [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8/tmp-sparse.vmdk {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 595.083374] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf62c729-9c04-445f-b1c3-ae2e9dfb9b0a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.089343] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 595.089343] env[62383]: value = "task-2451077" [ 595.089343] env[62383]: _type = "Task" [ 595.089343] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.098034] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451077, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.583207] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451076, 'name': ReconfigVM_Task, 'duration_secs': 0.227853} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.583425] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance 'a8d56b8e-fa11-4844-ab65-a2e5d24b1e07' progress to 33 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 595.598234] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062556} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.598494] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 595.598706] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Moving file from [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5/cac3b430-a1d5-4ad1-92ec-34c2261779a8 to [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8. {{(pid=62383) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 595.598948] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-5d0ecbf6-a56a-4716-b0f6-4a64374f0274 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.609062] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 595.609062] env[62383]: value = "task-2451078" [ 595.609062] env[62383]: _type = "Task" [ 595.609062] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.617263] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451078, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.700015] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquiring lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.700253] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.738986] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe284c4-b81e-4a69-986a-caf9819b5f52 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.746802] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccae811-d726-4958-8e1d-8bd668acc318 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.786206] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e30ade4-74de-40ab-a905-9d70b9561c7e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.797404] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c2acc9-0772-4998-b322-8fb157c3bc9f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.815483] env[62383]: DEBUG nova.compute.provider_tree [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 596.021474] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "a16193af-410e-4bf6-bb06-a97791cf6060" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.021933] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a16193af-410e-4bf6-bb06-a97791cf6060" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.095176] env[62383]: DEBUG nova.virt.hardware [None 
req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 596.095176] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 596.095176] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 596.095176] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 596.095355] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 596.095355] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 596.095355] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 596.095355] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 596.095355] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 596.095514] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 
tempest-MigrationsAdminTest-41819132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 596.095964] env[62383]: DEBUG nova.virt.hardware [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 596.103305] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Reconfiguring VM instance instance-00000005 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 596.109015] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e74aca1-ca98-41aa-8b8c-db7b4d3c1a18 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.137085] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451078, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.029374} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.137085] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 596.137085] env[62383]: value = "task-2451079" [ 596.137085] env[62383]: _type = "Task" [ 596.137085] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.137417] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] File moved {{(pid=62383) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 596.137953] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Cleaning up location [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 596.138209] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Deleting the datastore file [datastore1] vmware_temp/1c2558c3-4a66-4bb8-ad8e-0549871dccb5 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 596.138565] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d70500df-6cd8-4885-a496-2235bea63699 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.150789] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451079, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.155267] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 596.155267] env[62383]: value = "task-2451080" [ 596.155267] env[62383]: _type = "Task" [ 596.155267] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.163993] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451080, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.303182] env[62383]: DEBUG nova.network.neutron [-] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.321065] env[62383]: DEBUG nova.scheduler.client.report [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 596.650676] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451079, 'name': ReconfigVM_Task, 'duration_secs': 0.181828} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.653690] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Reconfigured VM instance instance-00000005 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 596.654639] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff9dc7e-60f9-4cb6-bf24-2d75341146db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.670087] env[62383]: DEBUG nova.compute.manager [req-a242efc4-9572-4b45-b2d7-ab5e76dbbb56 req-bd4fc2d0-7b80-40cf-a362-5a4f1b6dc8aa service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Received event network-changed-cc45bc62-e82d-40dc-b803-56b790aca5d4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 596.670298] env[62383]: DEBUG nova.compute.manager [req-a242efc4-9572-4b45-b2d7-ab5e76dbbb56 req-bd4fc2d0-7b80-40cf-a362-5a4f1b6dc8aa service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Refreshing instance network info cache due to event network-changed-cc45bc62-e82d-40dc-b803-56b790aca5d4. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 596.670519] env[62383]: DEBUG oslo_concurrency.lockutils [req-a242efc4-9572-4b45-b2d7-ab5e76dbbb56 req-bd4fc2d0-7b80-40cf-a362-5a4f1b6dc8aa service nova] Acquiring lock "refresh_cache-67d41910-54e1-48f1-b0d3-f34a62595ef2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.672298] env[62383]: DEBUG oslo_concurrency.lockutils [req-a242efc4-9572-4b45-b2d7-ab5e76dbbb56 req-bd4fc2d0-7b80-40cf-a362-5a4f1b6dc8aa service nova] Acquired lock "refresh_cache-67d41910-54e1-48f1-b0d3-f34a62595ef2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.672298] env[62383]: DEBUG nova.network.neutron [req-a242efc4-9572-4b45-b2d7-ab5e76dbbb56 req-bd4fc2d0-7b80-40cf-a362-5a4f1b6dc8aa service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Refreshing network info cache for port cc45bc62-e82d-40dc-b803-56b790aca5d4 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 596.702345] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] a8d56b8e-fa11-4844-ab65-a2e5d24b1e07/a8d56b8e-fa11-4844-ab65-a2e5d24b1e07.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 596.702746] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041011} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.704250] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05668545-75c1-43b7-b470-58492af60d34 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.717720] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 596.720258] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9fd17d3-4919-419d-9162-471512d8f6c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.726955] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 596.726955] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5200ced3-7589-ad91-70f6-17ed15b4a26c" [ 596.726955] env[62383]: _type = "Task" [ 596.726955] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.728735] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 596.728735] env[62383]: value = "task-2451081" [ 596.728735] env[62383]: _type = "Task" [ 596.728735] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.744601] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451081, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.744601] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5200ced3-7589-ad91-70f6-17ed15b4a26c, 'name': SearchDatastore_Task, 'duration_secs': 0.011505} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.745474] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.745474] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 80821717-f961-49c7-8b79-c152edfdfb94/80821717-f961-49c7-8b79-c152edfdfb94.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 596.745474] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.745675] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 596.748277] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-acbf81f7-f180-479c-a53c-12488d448232 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.748277] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-62ef7381-8116-48eb-969c-8ea8b06c35bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.754710] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 596.754710] env[62383]: value = "task-2451082" [ 596.754710] env[62383]: _type = "Task" [ 596.754710] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.760893] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 596.761289] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 596.762863] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c257de1c-46b3-4e83-8b50-00b63bdc628b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.773299] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451082, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.774683] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 596.774683] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52cde321-6212-c4f7-601f-dfc600a16790" [ 596.774683] env[62383]: _type = "Task" [ 596.774683] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.782157] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52cde321-6212-c4f7-601f-dfc600a16790, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.806681] env[62383]: INFO nova.compute.manager [-] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Took 1.83 seconds to deallocate network for instance. 
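(Annotation, not part of the captured log.) The task entries above and below — ReconfigVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task, MoveDatastoreFile_Task, SearchDatastore_Task — all follow the same invoke-then-poll pattern: the driver submits a vCenter task, then wait_for_task polls it until it finishes, which is why each task id appears first as "Waiting for the task", then as "progress is N%", and finally as "completed successfully". The following is a minimal illustrative sketch of that control flow, not the actual nova or oslo.vmware source; the names get_task_info and POLL_INTERVAL are placeholders introduced here for illustration.

    import time

    POLL_INTERVAL = 0.5  # illustrative; the real poll interval is configurable

    def wait_for_task(get_task_info, task_ref):
        """Poll a vCenter task reference until it reaches a terminal state."""
        while True:
            info = get_task_info(task_ref)  # caller supplies a TaskInfo-like object
            if info.state == 'success':
                # corresponds to the "... completed successfully." log entries
                return getattr(info, 'result', None)
            if info.state == 'error':
                raise RuntimeError(info.error)
            # corresponds to the "Task: {'id': ..., 'name': ...} progress is N%." entries
            time.sleep(POLL_INTERVAL)

In the captured log the polling is driven through oslo.service's looping-call machinery (loopingcall.py appears in the trace at the top of this section) rather than a bare sleep loop, but the observable behaviour is the same as the loop sketched above.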
[ 596.825878] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.728s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.826443] env[62383]: DEBUG nova.compute.manager [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 596.828981] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.630s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.830402] env[62383]: INFO nova.compute.claims [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 597.207432] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "0c01a974-2318-461b-965f-ba4932e3bea1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.208371] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "0c01a974-2318-461b-965f-ba4932e3bea1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.243246] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451081, 'name': ReconfigVM_Task, 'duration_secs': 0.444643} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.243614] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Reconfigured VM instance instance-00000005 to attach disk [datastore2] a8d56b8e-fa11-4844-ab65-a2e5d24b1e07/a8d56b8e-fa11-4844-ab65-a2e5d24b1e07.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 597.244427] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance 'a8d56b8e-fa11-4844-ab65-a2e5d24b1e07' progress to 50 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 597.265322] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451082, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497462} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.265581] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 80821717-f961-49c7-8b79-c152edfdfb94/80821717-f961-49c7-8b79-c152edfdfb94.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 597.265866] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 597.266148] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-100e4942-6e70-4471-9022-a7f9da1ca627 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.274907] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 597.274907] env[62383]: value = "task-2451083" [ 597.274907] env[62383]: _type = "Task" [ 597.274907] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.287590] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451083, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.293137] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52cde321-6212-c4f7-601f-dfc600a16790, 'name': SearchDatastore_Task, 'duration_secs': 0.018844} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.294486] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e813164-c90f-404d-8326-4b26f789f184 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.300094] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 597.300094] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527571f1-71e1-2cc7-00fb-c82169abb25a" [ 597.300094] env[62383]: _type = "Task" [ 597.300094] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.309057] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527571f1-71e1-2cc7-00fb-c82169abb25a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.316791] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.334730] env[62383]: DEBUG nova.compute.utils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 597.339620] env[62383]: DEBUG nova.compute.manager [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 597.339928] env[62383]: DEBUG nova.network.neutron [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 597.398326] env[62383]: DEBUG nova.compute.manager [req-ccb85bc4-d1d6-4b85-a948-61d414522423 req-714004e4-aefb-4fe0-95d4-f21dd0ffd23d service nova] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Received event network-vif-deleted-d1a6d806-7723-4d98-843f-fe34d9a9f94c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 597.427092] env[62383]: DEBUG nova.policy [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b7c90e0837e49c0974d6ae8a206eab8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6a704dc4ad9a49d98aa13af301548464', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 597.618699] env[62383]: DEBUG nova.network.neutron [req-a242efc4-9572-4b45-b2d7-ab5e76dbbb56 req-bd4fc2d0-7b80-40cf-a362-5a4f1b6dc8aa service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Updated VIF entry in instance network info cache for port cc45bc62-e82d-40dc-b803-56b790aca5d4. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 597.619266] env[62383]: DEBUG nova.network.neutron [req-a242efc4-9572-4b45-b2d7-ab5e76dbbb56 req-bd4fc2d0-7b80-40cf-a362-5a4f1b6dc8aa service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Updating instance_info_cache with network_info: [{"id": "cc45bc62-e82d-40dc-b803-56b790aca5d4", "address": "fa:16:3e:c5:92:47", "network": {"id": "15922417-5941-4372-b068-7f6b0a8c7335", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1040646301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75346deaf9ad40fa925d4aff9fdff2cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc45bc62-e8", "ovs_interfaceid": "cc45bc62-e82d-40dc-b803-56b790aca5d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.752042] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee01d51-aa6c-48f7-a16a-4671e6421cf3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.760248] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.760479] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.782499] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2d25bc-8b5d-44a4-ada9-f6e1a0d087c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.800292] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451083, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083364} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.814611] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 597.815376] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance 'a8d56b8e-fa11-4844-ab65-a2e5d24b1e07' progress to 67 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 597.823364] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b006ed4-6355-41f7-a6c7-b1d23a2660c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.833900] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527571f1-71e1-2cc7-00fb-c82169abb25a, 'name': SearchDatastore_Task, 'duration_secs': 0.010382} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.844491] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.845773] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 597.845773] env[62383]: DEBUG nova.compute.manager [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 597.866133] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 80821717-f961-49c7-8b79-c152edfdfb94/80821717-f961-49c7-8b79-c152edfdfb94.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 597.871184] env[62383]: DEBUG nova.network.neutron [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Successfully created port: a993c45d-5e8d-4cd7-b51a-991b816fa089 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 597.873301] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35a6c91d-04b3-40b3-ab56-28bd68acacdd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.876104] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce5fb5e7-270b-467f-906f-c01a39ca3c29 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.896926] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 597.896926] env[62383]: value = "task-2451085" [ 597.896926] env[62383]: _type = "Task" [ 597.896926] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.898447] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 597.898447] env[62383]: value = "task-2451084" [ 597.898447] env[62383]: _type = "Task" [ 597.898447] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.918219] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.918841] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451084, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.123591] env[62383]: DEBUG oslo_concurrency.lockutils [req-a242efc4-9572-4b45-b2d7-ab5e76dbbb56 req-bd4fc2d0-7b80-40cf-a362-5a4f1b6dc8aa service nova] Releasing lock "refresh_cache-67d41910-54e1-48f1-b0d3-f34a62595ef2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.421179] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451084, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.422213] env[62383]: DEBUG nova.network.neutron [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Port 5205d6ef-091d-4460-bd6c-3b1c5873c3ea binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 598.427682] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.543908] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquiring lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.544205] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.544409] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquiring lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.544587] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.544772] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a 
tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.549644] env[62383]: INFO nova.compute.manager [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Terminating instance [ 598.612066] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f583815-a16f-4b20-b4bf-0c4243f81a9c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.619840] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1064df-4b90-411d-a590-4ee10f038c20 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.651873] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d3a0e7-01b8-4dcc-a081-72e755ab9ecc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.659351] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c6bc32-8865-409f-bf8e-e6a398948dbe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.674017] env[62383]: DEBUG nova.compute.provider_tree [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 598.857700] env[62383]: DEBUG nova.compute.manager [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 598.896930] env[62383]: DEBUG nova.virt.hardware [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 598.897177] env[62383]: DEBUG nova.virt.hardware [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 598.897688] env[62383]: DEBUG nova.virt.hardware [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 598.897688] env[62383]: DEBUG nova.virt.hardware [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 598.897688] env[62383]: DEBUG nova.virt.hardware [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 598.897872] env[62383]: DEBUG nova.virt.hardware [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 598.897985] env[62383]: DEBUG nova.virt.hardware [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 598.898218] env[62383]: DEBUG nova.virt.hardware [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 598.898315] env[62383]: DEBUG nova.virt.hardware [None 
req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 598.898519] env[62383]: DEBUG nova.virt.hardware [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 598.900034] env[62383]: DEBUG nova.virt.hardware [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 598.900034] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd542fe-a0eb-470f-accc-676bd8545240 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.912909] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451085, 'name': ReconfigVM_Task, 'duration_secs': 1.009055} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.917046] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 80821717-f961-49c7-8b79-c152edfdfb94/80821717-f961-49c7-8b79-c152edfdfb94.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 598.917629] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8207ca63-c460-4635-b85f-2a3dbff4bf70 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.921501] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31be3020-6f27-4bcd-a5fb-bccf7ba91924 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.944864] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 598.944864] env[62383]: value = "task-2451086" [ 598.944864] env[62383]: _type = "Task" [ 598.944864] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.945571] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451084, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588518} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.946075] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.946323] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.949708] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd50147b-1547-4150-809f-ca28a3b96fda {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.957537] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 598.957537] env[62383]: value = "task-2451087" [ 598.957537] env[62383]: _type = "Task" [ 598.957537] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.965063] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451087, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.014067] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.014501] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.061831] env[62383]: DEBUG nova.compute.manager [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 599.062074] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 599.063151] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73193fce-ec8f-4c70-b773-0fa564d4adce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.072776] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 599.073367] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66102033-bb64-4104-b419-23456a221abf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.080239] env[62383]: DEBUG oslo_vmware.api [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for the task: (returnval){ [ 599.080239] env[62383]: value = "task-2451088" [ 599.080239] env[62383]: _type = "Task" [ 599.080239] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.088942] env[62383]: DEBUG oslo_vmware.api [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451088, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.177150] env[62383]: DEBUG nova.scheduler.client.report [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 599.475519] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.475519] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.476822] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.488323] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451086, 'name': Rename_Task, 'duration_secs': 0.368467} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.488748] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451087, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.273648} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.489049] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 599.489706] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 599.490713] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1033bcb7-ad3a-4faf-9ad1-a334aee61e0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.494372] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c17f91-d4b6-4cec-9f40-91325188b33e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.531378] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 599.533672] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7db7cf3-2a86-4621-91da-e2006da11c42 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.550061] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 599.550061] env[62383]: value = "task-2451089" [ 599.550061] env[62383]: _type = "Task" [ 599.550061] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.556344] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 599.556344] env[62383]: value = "task-2451090" [ 599.556344] env[62383]: _type = "Task" [ 599.556344] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.563660] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451089, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.569498] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451090, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.589904] env[62383]: DEBUG oslo_vmware.api [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451088, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.655953] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquiring lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.656207] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.687491] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.858s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.688259] env[62383]: DEBUG nova.compute.manager [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 599.692246] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.243s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.695209] env[62383]: INFO nova.compute.claims [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 599.960379] env[62383]: DEBUG nova.compute.manager [req-06bd2133-ed6b-4386-a5a5-fa0880d66b5f req-61602157-39e3-4c45-bbd4-645a14cd40d2 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Received event network-vif-plugged-a993c45d-5e8d-4cd7-b51a-991b816fa089 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 599.960719] env[62383]: DEBUG oslo_concurrency.lockutils [req-06bd2133-ed6b-4386-a5a5-fa0880d66b5f req-61602157-39e3-4c45-bbd4-645a14cd40d2 service nova] Acquiring lock "184d0caa-85c2-426d-82e5-ac52e525fe74-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.960829] env[62383]: DEBUG oslo_concurrency.lockutils [req-06bd2133-ed6b-4386-a5a5-fa0880d66b5f req-61602157-39e3-4c45-bbd4-645a14cd40d2 service nova] Lock "184d0caa-85c2-426d-82e5-ac52e525fe74-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 599.962050] env[62383]: DEBUG oslo_concurrency.lockutils [req-06bd2133-ed6b-4386-a5a5-fa0880d66b5f req-61602157-39e3-4c45-bbd4-645a14cd40d2 service nova] Lock "184d0caa-85c2-426d-82e5-ac52e525fe74-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.962050] env[62383]: DEBUG nova.compute.manager [req-06bd2133-ed6b-4386-a5a5-fa0880d66b5f req-61602157-39e3-4c45-bbd4-645a14cd40d2 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] No waiting events found dispatching network-vif-plugged-a993c45d-5e8d-4cd7-b51a-991b816fa089 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 599.962219] env[62383]: WARNING nova.compute.manager [req-06bd2133-ed6b-4386-a5a5-fa0880d66b5f req-61602157-39e3-4c45-bbd4-645a14cd40d2 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Received unexpected event network-vif-plugged-a993c45d-5e8d-4cd7-b51a-991b816fa089 for instance with vm_state building and task_state spawning. [ 600.063544] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451089, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.075269] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.077090] env[62383]: DEBUG nova.network.neutron [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Successfully updated port: a993c45d-5e8d-4cd7-b51a-991b816fa089 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 600.098580] env[62383]: DEBUG oslo_vmware.api [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451088, 'name': PowerOffVM_Task, 'duration_secs': 0.631492} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.099497] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 600.099668] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 600.100076] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98d5bc2c-294a-4f34-9626-702fd81bbf91 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.189757] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 600.190080] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 600.190238] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Deleting the datastore file [datastore2] eedadcc7-d02e-4a21-a43a-1dccde81b3b4 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 600.190508] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19f0ca96-c723-4340-a561-99b14fb70bb3 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.197162] env[62383]: DEBUG oslo_vmware.api [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for the task: (returnval){ [ 600.197162] env[62383]: value = "task-2451092" [ 600.197162] env[62383]: _type = "Task" [ 600.197162] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.205490] env[62383]: DEBUG nova.compute.utils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 600.210878] env[62383]: DEBUG nova.compute.manager [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Not allocating networking since 'none' was specified. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 600.217787] env[62383]: DEBUG oslo_vmware.api [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451092, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.562754] env[62383]: DEBUG oslo_vmware.api [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451089, 'name': PowerOnVM_Task, 'duration_secs': 0.822199} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.563956] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 600.564421] env[62383]: INFO nova.compute.manager [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Took 14.38 seconds to spawn the instance on the hypervisor. 
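The ReconfigVM_Task / Rename_Task / PowerOnVM_Task / DeleteDatastoreFile_Task entries above all follow the same shape: a vSphere method is invoked, a Task moref (the "task-24510xx" values) comes back, and oslo.vmware polls it until it completes. A minimal sketch of that calling pattern, assuming an already-created `session` (an `oslo_vmware.api.VMwareAPISession`) and a VM managed-object reference `vm_ref`; the function name is illustrative, not taken from the log:

```python
# Illustrative only: poll a vCenter task the way the wait_for_task /
# _poll_task entries in this log suggest. Assumes `session` is an existing
# oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine moref.
from oslo_vmware import exceptions as vexc


def power_on_and_wait(session, vm_ref):
    # Invoke the vSphere call through the session; this returns a Task
    # managed-object reference rather than blocking.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    try:
        # wait_for_task() polls TaskInfo until the task succeeds or fails,
        # producing the periodic "progress is N%" entries seen above.
        task_info = session.wait_for_task(task)
    except vexc.VimFaultException:
        # The task ended in an error state; re-raise or translate as needed.
        raise
    return task_info
```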
[ 600.565235] env[62383]: DEBUG nova.compute.manager [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 600.572715] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4cc63f2-aaca-4e2d-8b0f-19b8a24c0c8d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.582043] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquiring lock "refresh_cache-184d0caa-85c2-426d-82e5-ac52e525fe74" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.582348] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquired lock "refresh_cache-184d0caa-85c2-426d-82e5-ac52e525fe74" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.582509] env[62383]: DEBUG nova.network.neutron [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.586266] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451090, 'name': ReconfigVM_Task, 'duration_secs': 0.60292} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.589445] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.589664] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.589842] env[62383]: DEBUG nova.network.neutron [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.591196] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 600.597510] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2fd0def-4092-430f-8d15-01b764829e3a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.610112] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 600.610112] env[62383]: value = "task-2451093" [ 600.610112] env[62383]: _type = "Task" [ 600.610112] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.619161] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451093, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.709139] env[62383]: DEBUG oslo_vmware.api [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Task: {'id': task-2451092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155431} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.709241] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 600.709441] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 600.712865] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 600.712865] env[62383]: INFO nova.compute.manager [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Took 1.65 seconds to destroy the instance on the hypervisor. [ 600.712865] env[62383]: DEBUG oslo.service.loopingcall [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.715488] env[62383]: DEBUG nova.compute.manager [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 600.718166] env[62383]: DEBUG nova.compute.manager [-] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 600.718266] env[62383]: DEBUG nova.network.neutron [-] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 600.759852] env[62383]: DEBUG oslo_concurrency.lockutils [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.760188] env[62383]: DEBUG oslo_concurrency.lockutils [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.760402] env[62383]: DEBUG oslo_concurrency.lockutils [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.760537] env[62383]: DEBUG oslo_concurrency.lockutils [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.760756] env[62383]: DEBUG oslo_concurrency.lockutils [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.763855] env[62383]: INFO nova.compute.manager [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Terminating instance [ 601.123887] env[62383]: INFO nova.compute.manager [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Took 38.08 seconds to build instance. 
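The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entries come from oslo.service's looping-call machinery. Nova's exact retry helper is not reproduced here; the following is only a generic sketch of the same primitive, with a placeholder callback:

```python
# Illustrative only: the oslo.service looping-call primitive behind the
# "Waiting for function ... to return" log lines. The callback below is a
# placeholder, not Nova's real network-deallocation code.
from oslo_service import loopingcall


def deallocate_with_retries():
    # Do one attempt of the cleanup step here. Raising LoopingCallDone
    # stops the loop and hands its retvalue back to wait().
    raise loopingcall.LoopingCallDone(retvalue=True)


timer = loopingcall.FixedIntervalLoopingCall(deallocate_with_retries)
timer.start(interval=2.0, initial_delay=0)
result = timer.wait()  # blocks until LoopingCallDone is raised
```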
[ 601.132719] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451093, 'name': Rename_Task, 'duration_secs': 0.142351} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.135550] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 601.135550] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6aff9635-b76b-41bb-bf5e-6e13fe349c56 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.144878] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 601.144878] env[62383]: value = "task-2451094" [ 601.144878] env[62383]: _type = "Task" [ 601.144878] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.157803] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451094, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.181336] env[62383]: DEBUG nova.network.neutron [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.271490] env[62383]: DEBUG nova.compute.manager [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 601.271731] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 601.275078] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f50dad8-58d8-4391-8340-51f6321a4a6c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.285021] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 601.289117] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c954f7a7-8be8-4745-8ac1-e8133bc932b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.360266] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 601.360508] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 601.360695] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Deleting the datastore file [datastore2] dd0ad4e3-a6e6-4258-b960-544984e24ebc {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 601.360983] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-617670e7-74a5-41b9-8d97-4a52cd2aedc2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.378022] env[62383]: DEBUG oslo_vmware.api [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 601.378022] env[62383]: value = "task-2451096" [ 601.378022] env[62383]: _type = "Task" [ 601.378022] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.390274] env[62383]: DEBUG oslo_vmware.api [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451096, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.479385] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457cd801-276a-4c16-bfb5-42d02aeeb549 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.490946] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55afa23a-acdf-40cb-87f4-e8dccac42ce2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.542993] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6620d626-274e-444a-bc76-5c8d3932ff03 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.552032] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936b27dd-e84b-40b1-8525-623be047229a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.558246] env[62383]: DEBUG nova.network.neutron [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Updating instance_info_cache with network_info: [{"id": "a993c45d-5e8d-4cd7-b51a-991b816fa089", "address": "fa:16:3e:16:62:80", "network": {"id": "dd5f49a3-a6bc-44e2-a463-c648f7944c21", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2119121568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6a704dc4ad9a49d98aa13af301548464", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa993c45d-5e", "ovs_interfaceid": "a993c45d-5e8d-4cd7-b51a-991b816fa089", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.572716] env[62383]: DEBUG nova.compute.provider_tree [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.626308] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b28dfc72-92dc-4219-9056-66f16628bf07 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.866s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.658716] env[62383]: DEBUG nova.network.neutron [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance_info_cache with network_info: [{"id": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "address": "fa:16:3e:21:59:e3", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5205d6ef-09", "ovs_interfaceid": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.662793] env[62383]: DEBUG oslo_vmware.api [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451094, 'name': PowerOnVM_Task, 'duration_secs': 0.455589} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.663317] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 601.665017] env[62383]: INFO nova.compute.manager [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Took 7.90 seconds to spawn the instance on the hypervisor. 
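The recurring 'Acquiring lock ... by ...', 'Lock ... acquired ... waited N s', and 'Lock ... "released" ... held N s' triplets (for example the "80821717-..." and "compute_resources" locks above) are emitted by oslo.concurrency. A minimal sketch of the two usual entry points, with a hypothetical lock name and guarded function:

```python
# Illustrative only: the oslo.concurrency primitives that produce the
# Acquiring/acquired/"released" lines in this log. Lock name and guarded
# function are hypothetical.
from oslo_concurrency import lockutils


@lockutils.synchronized("build-and-run-instance", external=False)
def locked_do_build_and_run_instance(instance_uuid):
    # Only one thread at a time runs this body for the given lock name;
    # the waited/held durations are logged by lockutils itself.
    return instance_uuid


# The same semantics are available as a context manager:
with lockutils.lock("compute_resources"):
    pass  # e.g. claim or update resource-tracker state here
```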
[ 601.665017] env[62383]: DEBUG nova.compute.manager [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 601.665017] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d1a284-e9de-4ba0-955d-0c1a42c2cb17 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.733887] env[62383]: DEBUG nova.compute.manager [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 601.765328] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 601.765595] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 601.765749] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 601.765925] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 601.766175] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 601.766378] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 601.766687] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 601.766863] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 601.767046] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 601.767218] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 601.767390] env[62383]: DEBUG nova.virt.hardware [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 601.768344] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6831e934-904b-4c4f-8542-1f896f7e7887 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.777666] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32210644-47c0-44bf-8ed4-c4e26029be0b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.796149] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.803025] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Creating folder: Project (80b7d62efa7c4700bea51780fe97b2f3). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.803025] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76ba18ad-7383-40ec-ab05-4aa339b0ee92 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.815501] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Created folder: Project (80b7d62efa7c4700bea51780fe97b2f3) in parent group-v496304. [ 601.815501] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Creating folder: Instances. Parent ref: group-v496356. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.815686] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eaeb1f80-1194-48e9-aba3-54ef012da0fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.825351] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Created folder: Instances in parent group-v496356. [ 601.825679] env[62383]: DEBUG oslo.service.loopingcall [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.825915] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 601.826171] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72cb376e-daab-4662-b101-767cc8c34588 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.843749] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 601.843749] env[62383]: value = "task-2451099" [ 601.843749] env[62383]: _type = "Task" [ 601.843749] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.852021] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451099, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.886242] env[62383]: DEBUG oslo_vmware.api [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451096, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163525} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.886662] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 601.886734] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 601.889805] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 601.889805] env[62383]: INFO nova.compute.manager [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Took 0.62 seconds to destroy the instance on the hypervisor. [ 601.889805] env[62383]: DEBUG oslo.service.loopingcall [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.889805] env[62383]: DEBUG nova.compute.manager [-] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 601.889805] env[62383]: DEBUG nova.network.neutron [-] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 601.892775] env[62383]: DEBUG nova.network.neutron [-] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.061493] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Releasing lock "refresh_cache-184d0caa-85c2-426d-82e5-ac52e525fe74" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.061703] env[62383]: DEBUG nova.compute.manager [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Instance network_info: |[{"id": "a993c45d-5e8d-4cd7-b51a-991b816fa089", "address": "fa:16:3e:16:62:80", "network": {"id": "dd5f49a3-a6bc-44e2-a463-c648f7944c21", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2119121568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6a704dc4ad9a49d98aa13af301548464", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa993c45d-5e", "ovs_interfaceid": "a993c45d-5e8d-4cd7-b51a-991b816fa089", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 602.062059] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:62:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad4c015b-4a8b-46ca-9556-74bad8db9fb3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a993c45d-5e8d-4cd7-b51a-991b816fa089', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 602.069452] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 
tempest-ImagesNegativeTestJSON-454859679-project-member] Creating folder: Project (6a704dc4ad9a49d98aa13af301548464). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.069785] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-144cdeec-4248-43d4-9ccf-bf8cff40cdd8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.074713] env[62383]: DEBUG nova.scheduler.client.report [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 602.083552] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Created folder: Project (6a704dc4ad9a49d98aa13af301548464) in parent group-v496304. [ 602.083742] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Creating folder: Instances. Parent ref: group-v496359. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.083964] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18016cc4-c6f2-482d-9de4-ae47e13900e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.096582] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Created folder: Instances in parent group-v496359. [ 602.096817] env[62383]: DEBUG oslo.service.loopingcall [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 602.097009] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 602.097209] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-382579b9-1c7a-4e0c-b696-1cb5576a59f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.120349] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 602.120349] env[62383]: value = "task-2451102" [ 602.120349] env[62383]: _type = "Task" [ 602.120349] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.129317] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451102, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.131820] env[62383]: DEBUG nova.compute.manager [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.164872] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.195457] env[62383]: INFO nova.compute.manager [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Took 37.23 seconds to build instance. [ 602.353859] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451099, 'name': CreateVM_Task, 'duration_secs': 0.311218} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.354061] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 602.354494] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.354653] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.355069] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 602.355317] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ca35fd4-e65a-4ec7-baab-22fedd95c7d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.360189] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for the task: (returnval){ [ 602.360189] 
env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d0f373-c496-9398-1f57-dca581f37fce" [ 602.360189] env[62383]: _type = "Task" [ 602.360189] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.368682] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d0f373-c496-9398-1f57-dca581f37fce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.395828] env[62383]: INFO nova.compute.manager [-] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Took 1.68 seconds to deallocate network for instance. [ 602.528398] env[62383]: DEBUG nova.compute.manager [req-d2caf3b9-538f-4d90-9cde-1203e588ce04 req-fa6f70aa-b25d-415b-838e-30a3a72480b7 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Received event network-changed-a993c45d-5e8d-4cd7-b51a-991b816fa089 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 602.529020] env[62383]: DEBUG nova.compute.manager [req-d2caf3b9-538f-4d90-9cde-1203e588ce04 req-fa6f70aa-b25d-415b-838e-30a3a72480b7 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Refreshing instance network info cache due to event network-changed-a993c45d-5e8d-4cd7-b51a-991b816fa089. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 602.529476] env[62383]: DEBUG oslo_concurrency.lockutils [req-d2caf3b9-538f-4d90-9cde-1203e588ce04 req-fa6f70aa-b25d-415b-838e-30a3a72480b7 service nova] Acquiring lock "refresh_cache-184d0caa-85c2-426d-82e5-ac52e525fe74" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.530173] env[62383]: DEBUG oslo_concurrency.lockutils [req-d2caf3b9-538f-4d90-9cde-1203e588ce04 req-fa6f70aa-b25d-415b-838e-30a3a72480b7 service nova] Acquired lock "refresh_cache-184d0caa-85c2-426d-82e5-ac52e525fe74" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.530773] env[62383]: DEBUG nova.network.neutron [req-d2caf3b9-538f-4d90-9cde-1203e588ce04 req-fa6f70aa-b25d-415b-838e-30a3a72480b7 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Refreshing network info cache for port a993c45d-5e8d-4cd7-b51a-991b816fa089 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 602.582663] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.887s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.582663] env[62383]: DEBUG nova.compute.manager [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 602.585867] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.963s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.592976] env[62383]: INFO nova.compute.claims [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 602.634037] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451102, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.656077] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.690168] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408bf377-93a0-4d75-8841-698dbc624205 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.711368] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4c454f7b-6ed4-4f7d-8b6a-80797329b99f tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "69569fa0-5175-453e-9875-9ef46c723da8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.760s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.715418] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971b1800-8c35-4c52-abc7-c14480d0f517 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.723117] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance 'a8d56b8e-fa11-4844-ab65-a2e5d24b1e07' progress to 83 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 602.870911] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d0f373-c496-9398-1f57-dca581f37fce, 'name': SearchDatastore_Task, 'duration_secs': 0.0128} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.871237] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.871467] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.871704] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.871925] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.872123] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.872384] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41075f6a-06df-4e4f-9a42-c3c41e42000d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.880754] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 602.880955] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 602.881704] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8539d9e9-19ba-427c-a086-ec8e2188ee04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.886803] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for the task: (returnval){ [ 602.886803] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ad5504-e8a1-a3df-30ec-8042f396ef40" [ 602.886803] env[62383]: _type = "Task" [ 602.886803] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.894604] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ad5504-e8a1-a3df-30ec-8042f396ef40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.902648] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.095261] env[62383]: DEBUG nova.compute.utils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.099801] env[62383]: DEBUG nova.compute.manager [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 603.099801] env[62383]: DEBUG nova.network.neutron [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.134458] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451102, 'name': CreateVM_Task, 'duration_secs': 0.539876} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.134683] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 603.135596] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.135801] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.136185] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 603.136786] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fa2d508-c320-40a4-8f79-09254c279d4f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.142069] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for the task: (returnval){ [ 603.142069] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e48c3c-ac76-92ec-4f64-f71c4a9562f1" [ 603.142069] env[62383]: _type = "Task" [ 603.142069] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.151237] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e48c3c-ac76-92ec-4f64-f71c4a9562f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.154496] env[62383]: DEBUG nova.policy [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df4a52739e5a42f890af9e64958b013b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1266b6ffcfb4e658731a9e3345e6789', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 603.219800] env[62383]: DEBUG nova.compute.manager [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 603.236731] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 603.237103] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a677c08-6e47-4329-8ad5-ce1fed9eeac1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.246315] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 603.246315] env[62383]: value = "task-2451103" [ 603.246315] env[62383]: _type = "Task" [ 603.246315] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.258066] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451103, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.275855] env[62383]: DEBUG nova.network.neutron [-] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.397382] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ad5504-e8a1-a3df-30ec-8042f396ef40, 'name': SearchDatastore_Task, 'duration_secs': 0.008101} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.398265] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d2ca9aa-227a-4844-bfb3-efc6b51fda86 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.403325] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for the task: (returnval){ [ 603.403325] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5208510a-2314-f566-20fa-a733c0066787" [ 603.403325] env[62383]: _type = "Task" [ 603.403325] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.411199] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5208510a-2314-f566-20fa-a733c0066787, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.582165] env[62383]: DEBUG nova.network.neutron [req-d2caf3b9-538f-4d90-9cde-1203e588ce04 req-fa6f70aa-b25d-415b-838e-30a3a72480b7 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Updated VIF entry in instance network info cache for port a993c45d-5e8d-4cd7-b51a-991b816fa089. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 603.582165] env[62383]: DEBUG nova.network.neutron [req-d2caf3b9-538f-4d90-9cde-1203e588ce04 req-fa6f70aa-b25d-415b-838e-30a3a72480b7 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Updating instance_info_cache with network_info: [{"id": "a993c45d-5e8d-4cd7-b51a-991b816fa089", "address": "fa:16:3e:16:62:80", "network": {"id": "dd5f49a3-a6bc-44e2-a463-c648f7944c21", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-2119121568-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6a704dc4ad9a49d98aa13af301548464", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad4c015b-4a8b-46ca-9556-74bad8db9fb3", "external-id": "nsx-vlan-transportzone-246", "segmentation_id": 246, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa993c45d-5e", "ovs_interfaceid": "a993c45d-5e8d-4cd7-b51a-991b816fa089", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.600412] env[62383]: DEBUG nova.compute.manager [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Start building block device mappings for 
instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 603.654438] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e48c3c-ac76-92ec-4f64-f71c4a9562f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009106} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.657929] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.658266] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 603.658560] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.676349] env[62383]: INFO nova.compute.manager [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Rebuilding instance [ 603.741994] env[62383]: DEBUG nova.compute.manager [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 603.743064] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad83ac8-df71-426c-91ed-4276a0ebc49d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.747310] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.763467] env[62383]: DEBUG oslo_vmware.api [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451103, 'name': PowerOnVM_Task, 'duration_secs': 0.497679} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.763603] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 603.763652] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3d0cf0-30bd-40bc-95dd-fef53239e9cb tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance 'a8d56b8e-fa11-4844-ab65-a2e5d24b1e07' progress to 100 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 603.771441] env[62383]: DEBUG nova.compute.manager [req-6217507b-f6da-4d2e-9ca6-3ec90fca43c7 req-defa315d-15a3-4074-85b9-3b9cf346648a service nova] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Received event network-vif-deleted-7969303b-5cee-496e-841d-a0a254ed01e3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 603.784023] env[62383]: INFO nova.compute.manager [-] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Took 1.89 seconds to deallocate network for instance. [ 603.856721] env[62383]: DEBUG nova.network.neutron [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Successfully created port: c5143583-b4ea-45e7-9c76-40bb80e9b004 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.915459] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5208510a-2314-f566-20fa-a733c0066787, 'name': SearchDatastore_Task, 'duration_secs': 0.011844} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.915955] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.916368] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 0d992155-24fa-4836-83c9-8f188f7d7efa/0d992155-24fa-4836-83c9-8f188f7d7efa.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 603.916687] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.916804] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 603.917037] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56a73acf-85c9-49a5-80b5-0b7f8a689b97 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.919711] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06558b13-e721-4a7d-be52-b7d90be561d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.926156] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for the task: (returnval){ [ 603.926156] env[62383]: value = "task-2451104" [ 603.926156] env[62383]: _type = "Task" [ 603.926156] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.932721] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 603.932914] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 603.934269] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d86de26-f6b8-4100-b1cb-abe71ada8df0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.939516] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451104, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.942590] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for the task: (returnval){ [ 603.942590] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52fbd22a-b977-2bce-7685-fe5140c38400" [ 603.942590] env[62383]: _type = "Task" [ 603.942590] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.950470] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fbd22a-b977-2bce-7685-fe5140c38400, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.084334] env[62383]: DEBUG oslo_concurrency.lockutils [req-d2caf3b9-538f-4d90-9cde-1203e588ce04 req-fa6f70aa-b25d-415b-838e-30a3a72480b7 service nova] Releasing lock "refresh_cache-184d0caa-85c2-426d-82e5-ac52e525fe74" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.084651] env[62383]: DEBUG nova.compute.manager [req-d2caf3b9-538f-4d90-9cde-1203e588ce04 req-fa6f70aa-b25d-415b-838e-30a3a72480b7 service nova] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Received event network-vif-deleted-f072e9db-418e-4a2d-a8a0-3d6d74444bb7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 604.295473] env[62383]: DEBUG oslo_concurrency.lockutils [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.341301] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e8b1b4-a6ba-4a1b-a47e-3ac11db0df42 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.352094] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23355934-d91a-4640-93f2-2f58a71dba95 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.395458] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e97a06f-9ba9-4ce9-a636-156b9c1d13ba {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.405496] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff2e58f-3f2f-4b75-8c7b-326f4cd2cbe2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.424138] env[62383]: DEBUG nova.compute.provider_tree [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.437434] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451104, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.452952] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fbd22a-b977-2bce-7685-fe5140c38400, 'name': SearchDatastore_Task, 'duration_secs': 0.017089} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.453790] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f3cb3c-299b-4c23-8d49-9aad36be74b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.459717] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for the task: (returnval){ [ 604.459717] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52df1cc6-1e37-b165-2708-83a972dfa7e2" [ 604.459717] env[62383]: _type = "Task" [ 604.459717] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.467628] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52df1cc6-1e37-b165-2708-83a972dfa7e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.616944] env[62383]: DEBUG nova.compute.manager [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 604.650189] env[62383]: DEBUG nova.virt.hardware [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 604.650383] env[62383]: DEBUG nova.virt.hardware [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 604.650489] env[62383]: DEBUG nova.virt.hardware [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 604.650671] env[62383]: DEBUG nova.virt.hardware [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 604.650845] env[62383]: DEBUG nova.virt.hardware [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 604.651047] env[62383]: DEBUG nova.virt.hardware [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 604.651317] env[62383]: DEBUG nova.virt.hardware [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 604.651495] env[62383]: DEBUG nova.virt.hardware [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 604.651673] env[62383]: DEBUG nova.virt.hardware [None 
req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 604.651839] env[62383]: DEBUG nova.virt.hardware [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 604.652035] env[62383]: DEBUG nova.virt.hardware [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 604.652925] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72f4ff3-15b4-4dcb-b856-e45792f3d096 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.669054] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd59c1e-2b81-4e1e-b978-9f8642639748 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.764973] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 604.765348] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2c15f9e-be13-49f9-8641-a442cae56342 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.773124] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 604.773124] env[62383]: value = "task-2451105" [ 604.773124] env[62383]: _type = "Task" [ 604.773124] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.784326] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451105, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.926233] env[62383]: DEBUG nova.scheduler.client.report [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 604.949196] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451104, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544581} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.949505] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 0d992155-24fa-4836-83c9-8f188f7d7efa/0d992155-24fa-4836-83c9-8f188f7d7efa.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 604.949780] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 604.950066] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4d05a5d-15df-4f48-ad8c-b8623da6d51a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.958452] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for the task: (returnval){ [ 604.958452] env[62383]: value = "task-2451106" [ 604.958452] env[62383]: _type = "Task" [ 604.958452] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.970613] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451106, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.974972] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52df1cc6-1e37-b165-2708-83a972dfa7e2, 'name': SearchDatastore_Task, 'duration_secs': 0.012404} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.975248] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.975526] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 184d0caa-85c2-426d-82e5-ac52e525fe74/184d0caa-85c2-426d-82e5-ac52e525fe74.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 604.975789] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74d39804-291d-4d21-9067-1c08421dd845 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.981686] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for the task: (returnval){ [ 604.981686] env[62383]: value = "task-2451107" [ 604.981686] env[62383]: _type = "Task" [ 604.981686] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.989774] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451107, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.292634] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451105, 'name': PowerOffVM_Task, 'duration_secs': 0.128291} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.293113] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 605.294671] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.296682] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a20b8be-adc7-4f3a-babf-49f121c0cda8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.304858] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 605.305322] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c1937a8-af79-4695-8a36-9011af909834 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.336148] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 605.336276] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 605.336376] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Deleting the datastore file [datastore1] 69569fa0-5175-453e-9875-9ef46c723da8 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 605.336632] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ef7949d-5bde-471f-bf1d-8d6faa0e2dde {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.344051] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 605.344051] env[62383]: value = "task-2451109" [ 605.344051] env[62383]: _type = "Task" [ 605.344051] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.353862] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451109, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.439505] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.854s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.440068] env[62383]: DEBUG nova.compute.manager [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 605.444351] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.051s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.446341] env[62383]: INFO nova.compute.claims [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.473493] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451106, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07002} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.474949] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 605.479342] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa51f36-5abe-467f-a50f-5a07fd92e89a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.504802] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451107, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.518528] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 0d992155-24fa-4836-83c9-8f188f7d7efa/0d992155-24fa-4836-83c9-8f188f7d7efa.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 605.518528] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37ffc44b-0e9b-4433-ae2e-f8f1f15ca15b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.545330] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for the task: (returnval){ [ 605.545330] env[62383]: value = "task-2451110" [ 605.545330] env[62383]: _type = "Task" [ 605.545330] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.557401] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451110, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.856911] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451109, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.338187} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.856911] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 605.856911] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 605.857406] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 605.952019] env[62383]: DEBUG nova.compute.utils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 605.952334] env[62383]: DEBUG nova.compute.manager [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 605.952535] env[62383]: DEBUG nova.network.neutron [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 605.994950] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451107, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.895386} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.995353] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 184d0caa-85c2-426d-82e5-ac52e525fe74/184d0caa-85c2-426d-82e5-ac52e525fe74.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 605.995651] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 605.996058] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6876e0f6-73af-4f62-b1fc-361057a12934 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.007160] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for the task: (returnval){ [ 606.007160] env[62383]: value = "task-2451111" [ 606.007160] env[62383]: _type = "Task" [ 606.007160] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.017570] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451111, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.038251] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.038490] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.055021] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451110, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.095181] env[62383]: DEBUG nova.network.neutron [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Successfully updated port: c5143583-b4ea-45e7-9c76-40bb80e9b004 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 606.129491] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.129708] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.129903] env[62383]: DEBUG nova.compute.manager [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Going to confirm migration 1 {{(pid=62383) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 606.142466] env[62383]: DEBUG nova.policy [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a18f5f5e81ac41f6a72b6d0cbe549242', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbff513aaffd4e61a7607c7655cecfcd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 606.456046] env[62383]: DEBUG nova.compute.manager [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 606.522296] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075531} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.523064] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 606.524134] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77e10f7-0e03-4d42-81e7-211f4c1beb2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.553646] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] 184d0caa-85c2-426d-82e5-ac52e525fe74/184d0caa-85c2-426d-82e5-ac52e525fe74.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 606.556866] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b249b28a-5c60-478a-80a1-989130adcd29 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.588104] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451110, 'name': ReconfigVM_Task, 'duration_secs': 0.556928} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.591973] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 0d992155-24fa-4836-83c9-8f188f7d7efa/0d992155-24fa-4836-83c9-8f188f7d7efa.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 606.593112] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for the task: (returnval){ [ 606.593112] env[62383]: value = "task-2451112" [ 606.593112] env[62383]: _type = "Task" [ 606.593112] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.593395] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4983ac0-c5de-451a-b015-00cc47db65d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.595935] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquiring lock "refresh_cache-0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.595935] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquired lock "refresh_cache-0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.595935] env[62383]: DEBUG nova.network.neutron [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.606516] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451112, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.610408] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for the task: (returnval){ [ 606.610408] env[62383]: value = "task-2451113" [ 606.610408] env[62383]: _type = "Task" [ 606.610408] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.620916] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451113, 'name': Rename_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.730316] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.730514] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.730694] env[62383]: DEBUG nova.network.neutron [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 606.730876] env[62383]: DEBUG nova.objects.instance [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lazy-loading 'info_cache' on Instance uuid a8d56b8e-fa11-4844-ab65-a2e5d24b1e07 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 606.732860] env[62383]: DEBUG nova.network.neutron [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Successfully created port: 99fff832-18f0-4caa-85b2-428c5e2852a9 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.836566] env[62383]: DEBUG nova.compute.manager [req-55b4e955-e65c-4e77-9d41-37f355806078 req-65c555c4-bc43-478a-a832-910233a498c1 service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Received event network-changed-f6691e8a-b0ad-4db1-b2f2-c313f3c50a51 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 606.836784] env[62383]: DEBUG nova.compute.manager [req-55b4e955-e65c-4e77-9d41-37f355806078 req-65c555c4-bc43-478a-a832-910233a498c1 service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Refreshing instance network info cache due to event network-changed-f6691e8a-b0ad-4db1-b2f2-c313f3c50a51. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 606.837693] env[62383]: DEBUG oslo_concurrency.lockutils [req-55b4e955-e65c-4e77-9d41-37f355806078 req-65c555c4-bc43-478a-a832-910233a498c1 service nova] Acquiring lock "refresh_cache-80821717-f961-49c7-8b79-c152edfdfb94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 606.837693] env[62383]: DEBUG oslo_concurrency.lockutils [req-55b4e955-e65c-4e77-9d41-37f355806078 req-65c555c4-bc43-478a-a832-910233a498c1 service nova] Acquired lock "refresh_cache-80821717-f961-49c7-8b79-c152edfdfb94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.837693] env[62383]: DEBUG nova.network.neutron [req-55b4e955-e65c-4e77-9d41-37f355806078 req-65c555c4-bc43-478a-a832-910233a498c1 service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Refreshing network info cache for port f6691e8a-b0ad-4db1-b2f2-c313f3c50a51 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 606.902789] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 606.903104] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 606.903186] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 606.903364] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 606.903523] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 606.903916] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 
tempest-ServersAdmin275Test-1537639133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 606.903916] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 606.904125] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 606.904248] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 606.904382] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 606.905115] env[62383]: DEBUG nova.virt.hardware [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 606.905453] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3659b743-53d8-4e2f-b3e4-cce66ab2f223 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.918579] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a71ddc-da44-4de4-b3c2-1f6b60beb09a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.928678] env[62383]: DEBUG nova.compute.manager [req-a51cad91-c0d1-4483-93ef-7923d3bd5a7c req-ec056a74-199c-478c-89b2-8f7ffe0b7336 service nova] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Received event network-vif-plugged-c5143583-b4ea-45e7-9c76-40bb80e9b004 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 606.928921] env[62383]: DEBUG oslo_concurrency.lockutils [req-a51cad91-c0d1-4483-93ef-7923d3bd5a7c req-ec056a74-199c-478c-89b2-8f7ffe0b7336 service nova] Acquiring lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.929147] env[62383]: DEBUG oslo_concurrency.lockutils [req-a51cad91-c0d1-4483-93ef-7923d3bd5a7c req-ec056a74-199c-478c-89b2-8f7ffe0b7336 service nova] Lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: 
waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.929311] env[62383]: DEBUG oslo_concurrency.lockutils [req-a51cad91-c0d1-4483-93ef-7923d3bd5a7c req-ec056a74-199c-478c-89b2-8f7ffe0b7336 service nova] Lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.931020] env[62383]: DEBUG nova.compute.manager [req-a51cad91-c0d1-4483-93ef-7923d3bd5a7c req-ec056a74-199c-478c-89b2-8f7ffe0b7336 service nova] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] No waiting events found dispatching network-vif-plugged-c5143583-b4ea-45e7-9c76-40bb80e9b004 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 606.931020] env[62383]: WARNING nova.compute.manager [req-a51cad91-c0d1-4483-93ef-7923d3bd5a7c req-ec056a74-199c-478c-89b2-8f7ffe0b7336 service nova] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Received unexpected event network-vif-plugged-c5143583-b4ea-45e7-9c76-40bb80e9b004 for instance with vm_state building and task_state spawning. [ 606.938336] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 606.943908] env[62383]: DEBUG oslo.service.loopingcall [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.946787] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 606.947432] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85969661-5793-4371-83e5-d749a2fb6b40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.974043] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 606.974043] env[62383]: value = "task-2451114" [ 606.974043] env[62383]: _type = "Task" [ 606.974043] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.991262] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451114, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.112328] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451112, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.113240] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca31973-d329-4cfd-8bfd-387da72ca66d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.126637] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4916fd35-1cc4-42f3-bb85-b2879d068bdd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.129935] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451113, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.159041] env[62383]: DEBUG nova.network.neutron [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.165354] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d5ec8a-6d7e-44c1-b4f0-fc4d64001f73 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.170246] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff9b10b-a775-4fef-b566-98cae2fb6082 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.187158] env[62383]: DEBUG nova.compute.provider_tree [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.318051] env[62383]: DEBUG nova.network.neutron [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Updating instance_info_cache with network_info: [{"id": "c5143583-b4ea-45e7-9c76-40bb80e9b004", "address": "fa:16:3e:06:5c:a7", "network": {"id": "777d5429-0250-4391-93ed-019a886f8d9f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-194081184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1266b6ffcfb4e658731a9e3345e6789", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5143583-b4", 
"ovs_interfaceid": "c5143583-b4ea-45e7-9c76-40bb80e9b004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.471208] env[62383]: DEBUG nova.compute.manager [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 607.488630] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451114, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.505791] env[62383]: DEBUG nova.virt.hardware [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 607.506087] env[62383]: DEBUG nova.virt.hardware [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.506361] env[62383]: DEBUG nova.virt.hardware [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 607.506551] env[62383]: DEBUG nova.virt.hardware [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.506754] env[62383]: DEBUG nova.virt.hardware [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 607.507228] env[62383]: DEBUG nova.virt.hardware [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 607.507531] env[62383]: DEBUG nova.virt.hardware [None 
req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 607.507650] env[62383]: DEBUG nova.virt.hardware [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 607.507827] env[62383]: DEBUG nova.virt.hardware [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 607.508043] env[62383]: DEBUG nova.virt.hardware [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 607.508253] env[62383]: DEBUG nova.virt.hardware [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 607.509153] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d377ef-4b8d-4a54-ab49-7b807ff38a42 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.517099] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac000f1f-ec72-4f09-ae5f-afb0da83f352 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.604842] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451112, 'name': ReconfigVM_Task, 'duration_secs': 1.005132} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.605279] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Reconfigured VM instance instance-00000013 to attach disk [datastore2] 184d0caa-85c2-426d-82e5-ac52e525fe74/184d0caa-85c2-426d-82e5-ac52e525fe74.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 607.606020] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb5ed6e5-0089-4c3e-bc6a-a34fd0aa99a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.613183] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for the task: (returnval){ [ 607.613183] env[62383]: value = "task-2451115" [ 607.613183] env[62383]: _type = "Task" [ 607.613183] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.626282] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451113, 'name': Rename_Task, 'duration_secs': 0.567038} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.631579] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 607.631847] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451115, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.632089] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4db099f7-93c7-417d-b227-9ba6398af358 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.640546] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for the task: (returnval){ [ 607.640546] env[62383]: value = "task-2451116" [ 607.640546] env[62383]: _type = "Task" [ 607.640546] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.649826] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451116, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.692041] env[62383]: DEBUG nova.scheduler.client.report [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 607.814080] env[62383]: DEBUG nova.network.neutron [req-55b4e955-e65c-4e77-9d41-37f355806078 req-65c555c4-bc43-478a-a832-910233a498c1 service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Updated VIF entry in instance network info cache for port f6691e8a-b0ad-4db1-b2f2-c313f3c50a51. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 607.814676] env[62383]: DEBUG nova.network.neutron [req-55b4e955-e65c-4e77-9d41-37f355806078 req-65c555c4-bc43-478a-a832-910233a498c1 service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Updating instance_info_cache with network_info: [{"id": "f6691e8a-b0ad-4db1-b2f2-c313f3c50a51", "address": "fa:16:3e:e3:87:1e", "network": {"id": "86f13b93-2f69-4ec7-b838-5d8cb11a1051", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1138221332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "304a62370f8149049a797eb7077e910b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6691e8a-b0", "ovs_interfaceid": "f6691e8a-b0ad-4db1-b2f2-c313f3c50a51", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.820961] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Releasing lock "refresh_cache-0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 607.821599] env[62383]: DEBUG nova.compute.manager [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Instance network_info: |[{"id": "c5143583-b4ea-45e7-9c76-40bb80e9b004", "address": "fa:16:3e:06:5c:a7", 
"network": {"id": "777d5429-0250-4391-93ed-019a886f8d9f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-194081184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1266b6ffcfb4e658731a9e3345e6789", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5143583-b4", "ovs_interfaceid": "c5143583-b4ea-45e7-9c76-40bb80e9b004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 607.821974] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:5c:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee555dfd-3d1a-4220-89cd-ffba64e4acf0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5143583-b4ea-45e7-9c76-40bb80e9b004', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 607.833651] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Creating folder: Project (f1266b6ffcfb4e658731a9e3345e6789). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.836344] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc740ed6-6955-48ae-9f5f-e3968e82a2e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.847548] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Created folder: Project (f1266b6ffcfb4e658731a9e3345e6789) in parent group-v496304. [ 607.847858] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Creating folder: Instances. Parent ref: group-v496363. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.848031] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-049cc40e-1fd1-4d87-8a56-b7315de6c594 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.856561] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Created folder: Instances in parent group-v496363. [ 607.856798] env[62383]: DEBUG oslo.service.loopingcall [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.856977] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 607.857193] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a49437e-a0e7-431a-936f-9e7931c68764 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.881623] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 607.881623] env[62383]: value = "task-2451119" [ 607.881623] env[62383]: _type = "Task" [ 607.881623] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.889668] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451119, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.988034] env[62383]: DEBUG nova.network.neutron [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance_info_cache with network_info: [{"id": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "address": "fa:16:3e:21:59:e3", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5205d6ef-09", "ovs_interfaceid": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.989523] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451114, 'name': CreateVM_Task, 'duration_secs': 0.63813} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.990083] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 607.990496] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.990656] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.990972] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 607.994290] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e901fef-143d-45ed-8b7a-f629be906ae0 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.002513] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 608.002513] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a0aabf-2c04-7dfd-4977-57bffafb3ba4" [ 608.002513] env[62383]: _type = "Task" [ 608.002513] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.010507] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a0aabf-2c04-7dfd-4977-57bffafb3ba4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.022165] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquiring lock "8bd05dac-7aa2-44c5-8752-6045c01d213d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.022385] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lock "8bd05dac-7aa2-44c5-8752-6045c01d213d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.133247] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451115, 'name': Rename_Task, 'duration_secs': 0.179466} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.133247] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 608.133559] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db3401d2-f454-4754-8b48-9575c964bcf8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.140950] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for the task: (returnval){ [ 608.140950] env[62383]: value = "task-2451120" [ 608.140950] env[62383]: _type = "Task" [ 608.140950] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.156703] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451116, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.161262] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451120, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.197753] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.753s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.198177] env[62383]: DEBUG nova.compute.manager [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 608.205796] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.860s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.206181] env[62383]: DEBUG nova.objects.instance [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lazy-loading 'resources' on Instance uuid 571a5250-8655-4f30-b193-919affbc1bd8 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 608.318558] env[62383]: DEBUG oslo_concurrency.lockutils [req-55b4e955-e65c-4e77-9d41-37f355806078 req-65c555c4-bc43-478a-a832-910233a498c1 service nova] Releasing lock "refresh_cache-80821717-f961-49c7-8b79-c152edfdfb94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.393606] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451119, 'name': CreateVM_Task, 'duration_secs': 0.346138} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.393606] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 608.393943] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.491048] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.491538] env[62383]: DEBUG nova.objects.instance [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lazy-loading 'migration_context' on Instance uuid a8d56b8e-fa11-4844-ab65-a2e5d24b1e07 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 608.512389] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a0aabf-2c04-7dfd-4977-57bffafb3ba4, 'name': SearchDatastore_Task, 'duration_secs': 0.011476} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.512654] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.512997] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 608.514605] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.514605] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.514605] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 608.514605] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.514757] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 608.514757] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38254a48-8449-4ff2-96fc-8a3333ca2c3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.516990] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6986ff7f-59c1-4d0d-a7a1-c0105e0b81b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.521952] env[62383]: DEBUG oslo_vmware.api [None 
req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 608.521952] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b15c8b-14ea-28fd-b431-3826fa79f782" [ 608.521952] env[62383]: _type = "Task" [ 608.521952] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.526640] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 608.526640] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 608.527293] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0278d2cc-2684-4ee5-abac-ca752ff2b820 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.532224] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b15c8b-14ea-28fd-b431-3826fa79f782, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.535746] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 608.535746] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f8ba8d-43d1-29f1-efdd-4d3899398ef7" [ 608.535746] env[62383]: _type = "Task" [ 608.535746] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.543975] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f8ba8d-43d1-29f1-efdd-4d3899398ef7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.614340] env[62383]: DEBUG nova.network.neutron [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Successfully updated port: 99fff832-18f0-4caa-85b2-428c5e2852a9 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 608.653840] env[62383]: DEBUG oslo_vmware.api [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451116, 'name': PowerOnVM_Task, 'duration_secs': 0.528126} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.657142] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 608.657385] env[62383]: INFO nova.compute.manager [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Took 6.93 seconds to spawn the instance on the hypervisor. [ 608.657572] env[62383]: DEBUG nova.compute.manager [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 608.657858] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451120, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.658609] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7e98c8-6c95-45fe-94e9-009e44be7b07 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.709220] env[62383]: DEBUG nova.compute.utils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 608.713689] env[62383]: DEBUG nova.compute.manager [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 608.713885] env[62383]: DEBUG nova.network.neutron [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 608.798571] env[62383]: DEBUG nova.policy [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db7e9998210e485fa855f0375f63ad55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35016a724e7e4fa2b0fc19396d8e736b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 608.994835] env[62383]: DEBUG nova.objects.base [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 608.995640] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47916431-789c-44b3-b538-5d366e06506a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.020299] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5319b8c1-e931-4a24-a060-4b39b7b94e19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.046703] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b15c8b-14ea-28fd-b431-3826fa79f782, 'name': SearchDatastore_Task, 'duration_secs': 0.018187} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.047043] env[62383]: DEBUG oslo_vmware.api [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 609.047043] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f099a5-0939-6094-08b1-70f6adf70e19" [ 609.047043] env[62383]: _type = "Task" [ 609.047043] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.047311] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.047546] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 609.047753] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.057966] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f8ba8d-43d1-29f1-efdd-4d3899398ef7, 'name': SearchDatastore_Task, 'duration_secs': 0.012524} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.059710] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87cf30be-0d78-4fc4-91e8-f0159007a8b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.066715] env[62383]: DEBUG oslo_vmware.api [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f099a5-0939-6094-08b1-70f6adf70e19, 'name': SearchDatastore_Task, 'duration_secs': 0.008725} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.068057] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.072242] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 609.072242] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52109d4a-cd7c-f2b8-38ce-6797f0720187" [ 609.072242] env[62383]: _type = "Task" [ 609.072242] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.077448] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52109d4a-cd7c-f2b8-38ce-6797f0720187, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.118200] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquiring lock "refresh_cache-f28beb17-8455-49d3-8be0-7636b9abe4e8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.118376] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquired lock "refresh_cache-f28beb17-8455-49d3-8be0-7636b9abe4e8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.118583] env[62383]: DEBUG nova.network.neutron [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 609.156182] env[62383]: DEBUG oslo_vmware.api [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451120, 'name': PowerOnVM_Task, 'duration_secs': 0.527937} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.156465] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 609.156662] env[62383]: INFO nova.compute.manager [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Took 10.30 seconds to spawn the instance on the hypervisor. 
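The Rename_Task, CreateVM_Task, SearchDatastore_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver invokes an asynchronous vSphere *_Task method through the API session, gets back a task reference, and then blocks in wait_for_task() while _poll_task logs the "progress is N%" lines until the task reports completion. A minimal sketch of that pattern, with the session and vm_ref assumed to already exist (as they do inside the Nova VMware driver, where the session is built at startup and vm_ref comes from a PropertyCollector lookup):

    # Sketch only: `session` is an oslo_vmware.api.VMwareAPISession and
    # `vm_ref` a VirtualMachine managed-object reference; both are assumed
    # to be obtained elsewhere.
    def power_on(session, vm_ref):
        # Asynchronous vSphere call; returns immediately with a task moref.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() drives the "_poll_task ... progress is N%" log
        # lines seen above and raises if the task ends in an error state.
        return session.wait_for_task(task)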
[ 609.156834] env[62383]: DEBUG nova.compute.manager [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 609.157602] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a938d889-6e0a-4f4f-b592-0356512c32c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.176236] env[62383]: INFO nova.compute.manager [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Took 33.00 seconds to build instance. [ 609.217394] env[62383]: DEBUG nova.compute.manager [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 609.239022] env[62383]: DEBUG nova.compute.manager [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Received event network-vif-plugged-99fff832-18f0-4caa-85b2-428c5e2852a9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 609.239022] env[62383]: DEBUG oslo_concurrency.lockutils [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] Acquiring lock "f28beb17-8455-49d3-8be0-7636b9abe4e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.239022] env[62383]: DEBUG oslo_concurrency.lockutils [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] Lock "f28beb17-8455-49d3-8be0-7636b9abe4e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.239022] env[62383]: DEBUG oslo_concurrency.lockutils [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] Lock "f28beb17-8455-49d3-8be0-7636b9abe4e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.239022] env[62383]: DEBUG nova.compute.manager [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] No waiting events found dispatching network-vif-plugged-99fff832-18f0-4caa-85b2-428c5e2852a9 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 609.239571] env[62383]: WARNING nova.compute.manager [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Received unexpected event network-vif-plugged-99fff832-18f0-4caa-85b2-428c5e2852a9 for instance with vm_state building and task_state spawning. 
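The "Acquiring lock ..." / "Lock ... acquired ... waited 0.000s" / "Lock ... released ... held 0.000s" lines around the per-instance "-events" lock above come from oslo.concurrency's lockutils helpers, which Nova uses both as a decorator and as a context manager. A short sketch of the two forms that produce those log lines (the lock names below are illustrative placeholders, not the ones Nova uses):

    # Sketch only; lock names are placeholders.
    from oslo_concurrency import lockutils

    # Decorator form: logs "acquired by ... waited Ns" on entry and
    # "released by ... held Ns" on exit (the lockutils inner() lines).
    @lockutils.synchronized('example-instance-events')
    def pop_event(name, tag):
        ...

    # Context-manager form: logs "Acquiring lock", "Acquired lock" and
    # "Releasing lock"; external=True adds an inter-process file lock on
    # top of the in-process semaphore.
    with lockutils.lock('example-refresh-cache', external=False):
        pass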
[ 609.239571] env[62383]: DEBUG nova.compute.manager [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Received event network-changed-99fff832-18f0-4caa-85b2-428c5e2852a9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 609.239571] env[62383]: DEBUG nova.compute.manager [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Refreshing instance network info cache due to event network-changed-99fff832-18f0-4caa-85b2-428c5e2852a9. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 609.239571] env[62383]: DEBUG oslo_concurrency.lockutils [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] Acquiring lock "refresh_cache-f28beb17-8455-49d3-8be0-7636b9abe4e8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.324251] env[62383]: DEBUG nova.compute.manager [req-7cba4f64-30cb-47fc-8f33-8d0588f78126 req-85bc3348-f387-4cae-a614-32a841175dac service nova] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Received event network-changed-c5143583-b4ea-45e7-9c76-40bb80e9b004 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 609.324251] env[62383]: DEBUG nova.compute.manager [req-7cba4f64-30cb-47fc-8f33-8d0588f78126 req-85bc3348-f387-4cae-a614-32a841175dac service nova] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Refreshing instance network info cache due to event network-changed-c5143583-b4ea-45e7-9c76-40bb80e9b004. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 609.324251] env[62383]: DEBUG oslo_concurrency.lockutils [req-7cba4f64-30cb-47fc-8f33-8d0588f78126 req-85bc3348-f387-4cae-a614-32a841175dac service nova] Acquiring lock "refresh_cache-0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.324251] env[62383]: DEBUG oslo_concurrency.lockutils [req-7cba4f64-30cb-47fc-8f33-8d0588f78126 req-85bc3348-f387-4cae-a614-32a841175dac service nova] Acquired lock "refresh_cache-0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.324251] env[62383]: DEBUG nova.network.neutron [req-7cba4f64-30cb-47fc-8f33-8d0588f78126 req-85bc3348-f387-4cae-a614-32a841175dac service nova] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Refreshing network info cache for port c5143583-b4ea-45e7-9c76-40bb80e9b004 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 609.365933] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846393d0-8c7d-413e-a77f-b51c01222a5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.374264] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148e4215-9d4f-45ce-8e2a-0b4b117cebed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.412983] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03eb9bc0-a9ce-4071-a1a3-3119f6748a26 {{(pid=62383) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.421133] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23b93bf-d395-4dbe-b668-54f56f6595e3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.435813] env[62383]: DEBUG nova.compute.provider_tree [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.464882] env[62383]: DEBUG nova.network.neutron [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Successfully created port: f04b9ac5-a0ac-4535-944c-01e2d59d6db4 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.524154] env[62383]: DEBUG nova.compute.manager [None req-93688eac-4110-4b38-abe6-5d0ca2f17fde tempest-ServerDiagnosticsV248Test-252928681 tempest-ServerDiagnosticsV248Test-252928681-project-admin] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 609.525326] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d43be11-8d47-4ca4-9506-28df6a7c5158 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.531768] env[62383]: INFO nova.compute.manager [None req-93688eac-4110-4b38-abe6-5d0ca2f17fde tempest-ServerDiagnosticsV248Test-252928681 tempest-ServerDiagnosticsV248Test-252928681-project-admin] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Retrieving diagnostics [ 609.532478] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab6412b-4191-4a76-a66e-7f7de625b810 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.579161] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52109d4a-cd7c-f2b8-38ce-6797f0720187, 'name': SearchDatastore_Task, 'duration_secs': 0.033993} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.579488] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.579749] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 609.580047] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.580206] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 609.580408] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea548ee1-c7f1-464c-9642-29eec71272e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.582535] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-958a7cf0-ebc2-4d81-b256-89484408706d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.588818] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 609.588818] env[62383]: value = "task-2451121" [ 609.588818] env[62383]: _type = "Task" [ 609.588818] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.592811] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 609.592982] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 609.594070] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4823714-c04d-4d0d-b850-90b8fa73b99a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.598792] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.602067] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 609.602067] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52230fc0-9376-a776-6ccf-4f7e6ad928eb" [ 609.602067] env[62383]: _type = "Task" [ 609.602067] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.609132] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52230fc0-9376-a776-6ccf-4f7e6ad928eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.654432] env[62383]: DEBUG nova.network.neutron [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.675813] env[62383]: INFO nova.compute.manager [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Took 39.75 seconds to build instance. 
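The CopyVirtualDisk_Task entries above (task-2451121) are the cache-to-instance copy of the image vmdk: once SearchDatastore_Task confirms the cached cac3b430... disk is present under devstack-image-cache_base, the driver copies it into the instance's datastore directory before the VM is powered on. A rough sketch of that call through oslo.vmware, assuming a session and a Datacenter reference are already available; the parameter names follow the vSphere VirtualDiskManager API and the datastore paths are illustrative:

    # Sketch only; `session`, `dc_ref` and the datastore paths are assumed,
    # e.g. source_path = '[datastore2] devstack-image-cache_base/<image>/<image>.vmdk'
    # and  dest_path   = '[datastore2] <instance-uuid>/<instance-uuid>.vmdk'.
    def copy_cached_disk(session, dc_ref, source_path, dest_path):
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', vdm,
            sourceName=source_path, sourceDatacenter=dc_ref,
            destName=dest_path, destDatacenter=dc_ref)
        # Blocks while the CopyVirtualDisk_Task progress lines are logged.
        session.wait_for_task(task)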
[ 609.678367] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fafd273-0d3c-4f79-a28f-471c6b0431bd tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lock "0d992155-24fa-4836-83c9-8f188f7d7efa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.750s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.795400] env[62383]: DEBUG nova.network.neutron [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Updating instance_info_cache with network_info: [{"id": "99fff832-18f0-4caa-85b2-428c5e2852a9", "address": "fa:16:3e:5c:ab:f2", "network": {"id": "7b95a38b-72a6-4f25-acb6-d5bb13e71c04", "bridge": "br-int", "label": "tempest-ServersTestJSON-1830616172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbff513aaffd4e61a7607c7655cecfcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8800a981-a89e-42e4-8be9-cace419ba9cb", "external-id": "nsx-vlan-transportzone-962", "segmentation_id": 962, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99fff832-18", "ovs_interfaceid": "99fff832-18f0-4caa-85b2-428c5e2852a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.939509] env[62383]: DEBUG nova.scheduler.client.report [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 610.015561] env[62383]: DEBUG nova.network.neutron [req-7cba4f64-30cb-47fc-8f33-8d0588f78126 req-85bc3348-f387-4cae-a614-32a841175dac service nova] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Updated VIF entry in instance network info cache for port c5143583-b4ea-45e7-9c76-40bb80e9b004. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 610.016105] env[62383]: DEBUG nova.network.neutron [req-7cba4f64-30cb-47fc-8f33-8d0588f78126 req-85bc3348-f387-4cae-a614-32a841175dac service nova] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Updating instance_info_cache with network_info: [{"id": "c5143583-b4ea-45e7-9c76-40bb80e9b004", "address": "fa:16:3e:06:5c:a7", "network": {"id": "777d5429-0250-4391-93ed-019a886f8d9f", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-194081184-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1266b6ffcfb4e658731a9e3345e6789", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee555dfd-3d1a-4220-89cd-ffba64e4acf0", "external-id": "nsx-vlan-transportzone-88", "segmentation_id": 88, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5143583-b4", "ovs_interfaceid": "c5143583-b4ea-45e7-9c76-40bb80e9b004", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.098942] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.110810] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52230fc0-9376-a776-6ccf-4f7e6ad928eb, 'name': SearchDatastore_Task, 'duration_secs': 0.010814} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.111575] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e337fdee-e4f9-4d70-81fa-f4dc24c9853d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.117126] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 610.117126] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5219058a-c3da-e71f-92e7-dd464b43b690" [ 610.117126] env[62383]: _type = "Task" [ 610.117126] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.124937] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5219058a-c3da-e71f-92e7-dd464b43b690, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.178365] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2c63f05d-1e2d-450a-9bc2-a8a10768392e tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lock "184d0caa-85c2-426d-82e5-ac52e525fe74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.916s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.179997] env[62383]: DEBUG nova.compute.manager [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 610.234234] env[62383]: DEBUG nova.compute.manager [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 610.264617] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 610.264899] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.265132] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 610.265350] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 
tempest-ServersTestJSON-715409334-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.265560] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 610.265649] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 610.265860] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 610.266048] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 610.266253] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 610.266450] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 610.266615] env[62383]: DEBUG nova.virt.hardware [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 610.267537] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08f4aa3-13ec-49f9-9b8e-f6e7f90c3487 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.276365] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bd0707-e190-43ce-b088-caa1a011b8a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.297878] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Releasing lock "refresh_cache-f28beb17-8455-49d3-8be0-7636b9abe4e8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.297987] env[62383]: DEBUG nova.compute.manager [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 
tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Instance network_info: |[{"id": "99fff832-18f0-4caa-85b2-428c5e2852a9", "address": "fa:16:3e:5c:ab:f2", "network": {"id": "7b95a38b-72a6-4f25-acb6-d5bb13e71c04", "bridge": "br-int", "label": "tempest-ServersTestJSON-1830616172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbff513aaffd4e61a7607c7655cecfcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8800a981-a89e-42e4-8be9-cace419ba9cb", "external-id": "nsx-vlan-transportzone-962", "segmentation_id": 962, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99fff832-18", "ovs_interfaceid": "99fff832-18f0-4caa-85b2-428c5e2852a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 610.299136] env[62383]: DEBUG oslo_concurrency.lockutils [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] Acquired lock "refresh_cache-f28beb17-8455-49d3-8be0-7636b9abe4e8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.299136] env[62383]: DEBUG nova.network.neutron [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Refreshing network info cache for port 99fff832-18f0-4caa-85b2-428c5e2852a9 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 610.302588] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:ab:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8800a981-a89e-42e4-8be9-cace419ba9cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99fff832-18f0-4caa-85b2-428c5e2852a9', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 610.307554] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Creating folder: Project (dbff513aaffd4e61a7607c7655cecfcd). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 610.311025] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-370410c3-5f74-4a8b-80a4-f2568575fcbb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.322279] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Created folder: Project (dbff513aaffd4e61a7607c7655cecfcd) in parent group-v496304. [ 610.322438] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Creating folder: Instances. Parent ref: group-v496366. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 610.323123] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b3a7822-9ac0-4d11-8e82-bdb9e4c8f4b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.332242] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Created folder: Instances in parent group-v496366. [ 610.333362] env[62383]: DEBUG oslo.service.loopingcall [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 610.333362] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 610.333362] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2eee039-af29-4af8-9c2a-7562d090d639 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.359680] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 610.359680] env[62383]: value = "task-2451124" [ 610.359680] env[62383]: _type = "Task" [ 610.359680] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.368945] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451124, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.444786] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.242s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.451307] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.318s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.454403] env[62383]: INFO nova.compute.claims [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.481221] env[62383]: INFO nova.scheduler.client.report [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Deleted allocations for instance 571a5250-8655-4f30-b193-919affbc1bd8 [ 610.519354] env[62383]: DEBUG oslo_concurrency.lockutils [req-7cba4f64-30cb-47fc-8f33-8d0588f78126 req-85bc3348-f387-4cae-a614-32a841175dac service nova] Releasing lock "refresh_cache-0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.573891] env[62383]: DEBUG nova.network.neutron [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Updated VIF entry in instance network info cache for port 99fff832-18f0-4caa-85b2-428c5e2852a9. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 610.574278] env[62383]: DEBUG nova.network.neutron [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Updating instance_info_cache with network_info: [{"id": "99fff832-18f0-4caa-85b2-428c5e2852a9", "address": "fa:16:3e:5c:ab:f2", "network": {"id": "7b95a38b-72a6-4f25-acb6-d5bb13e71c04", "bridge": "br-int", "label": "tempest-ServersTestJSON-1830616172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbff513aaffd4e61a7607c7655cecfcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8800a981-a89e-42e4-8be9-cace419ba9cb", "external-id": "nsx-vlan-transportzone-962", "segmentation_id": 962, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99fff832-18", "ovs_interfaceid": "99fff832-18f0-4caa-85b2-428c5e2852a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.601485] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451121, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.628126] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5219058a-c3da-e71f-92e7-dd464b43b690, 'name': SearchDatastore_Task, 'duration_secs': 0.014395} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.628475] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.628854] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be/0dd47ff3-5a5b-4c51-8e6a-fc11449f21be.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 610.629186] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3f068b9-e7b1-488f-afb3-a9832dbf7534 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.635940] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 610.635940] env[62383]: value = "task-2451125" [ 610.635940] env[62383]: _type = "Task" [ 610.635940] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.644306] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451125, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.683394] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 610.703328] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.870755] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451124, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.990578] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9314f72-f349-472c-b264-ce31fdcded7d tempest-TenantUsagesTestJSON-356774607 tempest-TenantUsagesTestJSON-356774607-project-member] Lock "571a5250-8655-4f30-b193-919affbc1bd8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.203s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.077683] env[62383]: DEBUG oslo_concurrency.lockutils [req-8ad028a7-b696-41af-b829-22145e995353 req-c3e7168a-72dc-4071-8b77-d3c025be1f08 service nova] Releasing lock "refresh_cache-f28beb17-8455-49d3-8be0-7636b9abe4e8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.102180] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451121, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.032357} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.102554] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 611.102692] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 611.102949] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19314686-4b94-4ea0-8c6a-788ed311f069 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.112025] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 611.112025] env[62383]: value = "task-2451126" [ 611.112025] env[62383]: _type = "Task" [ 611.112025] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.119565] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451126, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.150472] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451125, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.218230] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.374309] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451124, 'name': CreateVM_Task, 'duration_secs': 0.578523} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.374309] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 611.374309] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.374309] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.374309] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 611.374309] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ebea670-4952-4081-9b63-8d22b664b2e2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.379259] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for the task: (returnval){ [ 611.379259] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5259df88-bb4b-bc0c-e215-46d2c4847dc4" [ 611.379259] env[62383]: _type = "Task" [ 611.379259] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.388354] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5259df88-bb4b-bc0c-e215-46d2c4847dc4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.397691] env[62383]: DEBUG nova.network.neutron [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Successfully updated port: f04b9ac5-a0ac-4535-944c-01e2d59d6db4 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 611.624972] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451126, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093496} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.626063] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 611.626862] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46eb4f1e-3f6a-46bf-ad7a-93c0787ba594 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.650381] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 611.655856] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12834c93-1b53-422a-809d-f3d1e6119b56 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.675589] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451125, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63772} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.677175] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be/0dd47ff3-5a5b-4c51-8e6a-fc11449f21be.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 611.677426] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 611.677741] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 611.677741] env[62383]: value = "task-2451127" [ 611.677741] env[62383]: _type = "Task" [ 611.677741] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.677925] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab219187-cf86-4f73-9626-7fbfdc8d7da2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.685456] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquiring lock "184d0caa-85c2-426d-82e5-ac52e525fe74" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.685653] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lock "184d0caa-85c2-426d-82e5-ac52e525fe74" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.685849] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquiring lock "184d0caa-85c2-426d-82e5-ac52e525fe74-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.686040] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lock "184d0caa-85c2-426d-82e5-ac52e525fe74-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.686210] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lock "184d0caa-85c2-426d-82e5-ac52e525fe74-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.688295] env[62383]: INFO nova.compute.manager [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Terminating instance [ 611.693550] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 611.693550] env[62383]: value = "task-2451128" [ 611.693550] env[62383]: _type = "Task" [ 611.693550] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.693835] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451127, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.705964] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451128, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.893894] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5259df88-bb4b-bc0c-e215-46d2c4847dc4, 'name': SearchDatastore_Task, 'duration_secs': 0.016209} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.896945] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.897298] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 611.897593] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.897795] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.897984] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 611.903022] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b27a18a-36b2-4f67-a818-684b35b7bc92 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.903022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "refresh_cache-13db2c17-ccba-4336-929a-0d01202c5143" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.903022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "refresh_cache-13db2c17-ccba-4336-929a-0d01202c5143" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.903022] env[62383]: DEBUG nova.network.neutron [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 611.907727] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 
tempest-ServersTestJSON-482144226-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 611.907727] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 611.908816] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a53c91d-0a99-423a-8f1f-910c0a67da2b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.916024] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for the task: (returnval){ [ 611.916024] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52af58c9-b4d8-3385-abde-29d5af27dba6" [ 611.916024] env[62383]: _type = "Task" [ 611.916024] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.927099] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52af58c9-b4d8-3385-abde-29d5af27dba6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.997195] env[62383]: DEBUG nova.compute.manager [req-1db74b89-a8da-4a2f-b266-ef5937e15a56 req-39b74507-ba43-4880-a7b7-ca12302facc6 service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Received event network-vif-plugged-f04b9ac5-a0ac-4535-944c-01e2d59d6db4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 611.997420] env[62383]: DEBUG oslo_concurrency.lockutils [req-1db74b89-a8da-4a2f-b266-ef5937e15a56 req-39b74507-ba43-4880-a7b7-ca12302facc6 service nova] Acquiring lock "13db2c17-ccba-4336-929a-0d01202c5143-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.997631] env[62383]: DEBUG oslo_concurrency.lockutils [req-1db74b89-a8da-4a2f-b266-ef5937e15a56 req-39b74507-ba43-4880-a7b7-ca12302facc6 service nova] Lock "13db2c17-ccba-4336-929a-0d01202c5143-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.997856] env[62383]: DEBUG oslo_concurrency.lockutils [req-1db74b89-a8da-4a2f-b266-ef5937e15a56 req-39b74507-ba43-4880-a7b7-ca12302facc6 service nova] Lock "13db2c17-ccba-4336-929a-0d01202c5143-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.998048] env[62383]: DEBUG nova.compute.manager [req-1db74b89-a8da-4a2f-b266-ef5937e15a56 req-39b74507-ba43-4880-a7b7-ca12302facc6 service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] No waiting events found dispatching 
network-vif-plugged-f04b9ac5-a0ac-4535-944c-01e2d59d6db4 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 611.998217] env[62383]: WARNING nova.compute.manager [req-1db74b89-a8da-4a2f-b266-ef5937e15a56 req-39b74507-ba43-4880-a7b7-ca12302facc6 service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Received unexpected event network-vif-plugged-f04b9ac5-a0ac-4535-944c-01e2d59d6db4 for instance with vm_state building and task_state spawning. [ 612.085198] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5aded4-3bab-47bf-8764-240eb0f965c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.092887] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c79e128-2f8f-46ad-b93f-595b363287b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.869750] env[62383]: DEBUG nova.compute.manager [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 612.870146] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 612.884296] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56eaf76-dc9c-4d71-808f-2d3248488cdf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.893699] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fdd1924-a462-4eca-8222-3666a9861026 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.907105] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 612.918024] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8b74179-4dc2-4135-b892-76319afd1db4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.920396] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451128, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.201126} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.920719] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451127, 'name': ReconfigVM_Task, 'duration_secs': 1.014374} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.926241] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.926673] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 612.927587] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52af58c9-b4d8-3385-abde-29d5af27dba6, 'name': SearchDatastore_Task, 'duration_secs': 0.01686} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.928478] env[62383]: DEBUG nova.network.neutron [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.934760] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2ee76c-6ee4-4317-aec1-458c7991ff50 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.938162] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd64b799-05d5-40b0-b941-1b15d1b94fb5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.942112] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-877c78ea-5b32-44dd-8e1d-e70f2fd95303 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.947168] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b554591a-505c-4fb5-8601-1ec6dc26e88a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.952694] env[62383]: DEBUG oslo_vmware.api [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for the task: (returnval){ [ 612.952694] env[62383]: value = "task-2451129" [ 612.952694] env[62383]: _type = "Task" [ 612.952694] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.986679] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 612.986679] env[62383]: value = "task-2451130" [ 612.986679] env[62383]: _type = "Task" [ 612.986679] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.987107] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for the task: (returnval){ [ 612.987107] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e91f99-5f5d-8d0f-d225-89f445e2456c" [ 612.987107] env[62383]: _type = "Task" [ 612.987107] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.990000] env[62383]: DEBUG nova.compute.provider_tree [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.003679] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be/0dd47ff3-5a5b-4c51-8e6a-fc11449f21be.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 613.005913] env[62383]: DEBUG nova.scheduler.client.report [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 613.014344] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e252e2c-1dd1-416c-bd6c-b1fe3b653205 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.039977] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.589s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.040704] env[62383]: DEBUG nova.compute.manager [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 613.057592] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.878s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.057991] env[62383]: DEBUG nova.objects.instance [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 613.062153] env[62383]: DEBUG oslo_vmware.api [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451129, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.076481] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451130, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.077382] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e91f99-5f5d-8d0f-d225-89f445e2456c, 'name': SearchDatastore_Task, 'duration_secs': 0.015913} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.079216] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.079661] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] f28beb17-8455-49d3-8be0-7636b9abe4e8/f28beb17-8455-49d3-8be0-7636b9abe4e8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 613.080395] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 613.080395] env[62383]: value = "task-2451131" [ 613.080395] env[62383]: _type = "Task" [ 613.080395] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.080608] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f2adc8f-a0ea-44b4-9485-6c47b507f481 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.097348] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451131, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.099151] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for the task: (returnval){ [ 613.099151] env[62383]: value = "task-2451132" [ 613.099151] env[62383]: _type = "Task" [ 613.099151] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.107257] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451132, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.293441] env[62383]: DEBUG nova.network.neutron [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Updating instance_info_cache with network_info: [{"id": "f04b9ac5-a0ac-4535-944c-01e2d59d6db4", "address": "fa:16:3e:3a:38:9f", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf04b9ac5-a0", "ovs_interfaceid": "f04b9ac5-a0ac-4535-944c-01e2d59d6db4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.464187] env[62383]: DEBUG oslo_vmware.api [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451129, 'name': PowerOffVM_Task, 'duration_secs': 0.198679} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.464490] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 613.464662] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 613.467347] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33a9268d-aa68-4db9-9c87-1f0fb4d68cee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.525432] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451130, 'name': Rename_Task, 'duration_secs': 0.175352} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.525432] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 613.525432] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8592d1a-bace-41b1-b9f4-2acb8b2e0beb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.535023] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 613.535023] env[62383]: value = "task-2451134" [ 613.535023] env[62383]: _type = "Task" [ 613.535023] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.542356] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 613.542559] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 613.542733] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Deleting the datastore file [datastore2] 184d0caa-85c2-426d-82e5-ac52e525fe74 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 613.543691] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-480aa235-9b77-4026-b910-babe753e7edb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.550607] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451134, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.560286] env[62383]: DEBUG oslo_vmware.api [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for the task: (returnval){ [ 613.560286] env[62383]: value = "task-2451135" [ 613.560286] env[62383]: _type = "Task" [ 613.560286] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.564331] env[62383]: DEBUG nova.compute.utils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 613.578503] env[62383]: DEBUG nova.compute.manager [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 613.578720] env[62383]: DEBUG nova.network.neutron [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 613.580881] env[62383]: DEBUG oslo_vmware.api [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451135, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.594183] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451131, 'name': ReconfigVM_Task, 'duration_secs': 0.321423} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.594441] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Reconfigured VM instance instance-00000016 to attach disk [datastore2] 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be/0dd47ff3-5a5b-4c51-8e6a-fc11449f21be.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 613.595410] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d895a0b0-33d2-4bdc-b3ba-8f586bc7a565 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.605455] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 613.605455] env[62383]: value = "task-2451136" [ 613.605455] env[62383]: _type = "Task" [ 613.605455] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.612347] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451132, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.619015] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451136, 'name': Rename_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.636321] env[62383]: DEBUG nova.policy [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8500ba91ab5445d82406ff31a9ea721', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e20c8351a13a427db4fccbac7108c205', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 613.795705] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "refresh_cache-13db2c17-ccba-4336-929a-0d01202c5143" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.795929] env[62383]: DEBUG nova.compute.manager [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Instance network_info: |[{"id": "f04b9ac5-a0ac-4535-944c-01e2d59d6db4", "address": "fa:16:3e:3a:38:9f", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf04b9ac5-a0", "ovs_interfaceid": "f04b9ac5-a0ac-4535-944c-01e2d59d6db4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 613.796511] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:38:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f04b9ac5-a0ac-4535-944c-01e2d59d6db4', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 613.804120] env[62383]: DEBUG oslo.service.loopingcall [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 613.804364] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 613.804585] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4acf87cc-2545-497b-bc7f-e18fe410b2a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.824161] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 613.824161] env[62383]: value = "task-2451137" [ 613.824161] env[62383]: _type = "Task" [ 613.824161] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.831738] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451137, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.045851] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451134, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.068291] env[62383]: DEBUG oslo_vmware.api [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Task: {'id': task-2451135, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.361543} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.068634] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 614.068831] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 614.069030] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.069204] env[62383]: INFO nova.compute.manager [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Took 1.20 seconds to destroy the instance on the hypervisor. 
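[editor's note] The entries above show the driver's long-running vCenter operations (CopyVirtualDisk_Task, PowerOnVM_Task, DeleteDatastoreFile_Task, CreateVM_Task) being submitted and then polled to completion via wait_for_task/_poll_task, with periodic "progress is N%" lines. The sketch below is illustrative only: it mimics that polling pattern but is not the oslo.vmware implementation, and `get_task_info` is a hypothetical callable standing in for the vCenter task reference.

```python
# Illustrative sketch only -- NOT oslo.vmware code. Mimics the wait_for_task /
# _poll_task pattern visible in the log: poll a submitted task until it
# succeeds, fails, or times out, logging progress along the way.
import time


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task-info callable until the task finishes.

    `get_task_info` is a hypothetical callable returning an object with
    `state` ('running', 'success', 'error') and `progress` (0-100) attributes,
    standing in for the VirtualMachine/VirtualDiskManager task refs above.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {getattr(info, 'error', 'unknown')}")
        # Corresponds to the "progress is N%" lines emitted while polling.
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")
```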
[ 614.069503] env[62383]: DEBUG oslo.service.loopingcall [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.069662] env[62383]: DEBUG nova.compute.manager [-] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 614.069760] env[62383]: DEBUG nova.network.neutron [-] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.074665] env[62383]: DEBUG nova.compute.manager [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 614.084154] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd6576b-bf1a-4f44-b5ec-ed6fe2189672 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.084911] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.418s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.087593] env[62383]: INFO nova.compute.claims [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 614.112928] env[62383]: DEBUG nova.network.neutron [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Successfully created port: 2dd94f31-46c1-4662-9f19-e6f69a3decf8 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 614.123432] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451132, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531329} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.123659] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451136, 'name': Rename_Task, 'duration_secs': 0.135859} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.124580] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] f28beb17-8455-49d3-8be0-7636b9abe4e8/f28beb17-8455-49d3-8be0-7636b9abe4e8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 614.124792] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 614.129301] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 614.129301] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c825fed9-ef92-4e28-968d-193205cce6b3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.129301] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96e86722-2f57-4ebb-acf7-8f51b8c298d9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.138643] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 614.138643] env[62383]: value = "task-2451139" [ 614.138643] env[62383]: _type = "Task" [ 614.138643] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.140438] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for the task: (returnval){ [ 614.140438] env[62383]: value = "task-2451138" [ 614.140438] env[62383]: _type = "Task" [ 614.140438] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.155021] env[62383]: DEBUG nova.compute.manager [req-bb7904f8-191b-411b-b6ee-0f7f50f8102c req-6c216f3d-280e-4a90-a275-b4a7796fbd9a service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Received event network-changed-f04b9ac5-a0ac-4535-944c-01e2d59d6db4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 614.155021] env[62383]: DEBUG nova.compute.manager [req-bb7904f8-191b-411b-b6ee-0f7f50f8102c req-6c216f3d-280e-4a90-a275-b4a7796fbd9a service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Refreshing instance network info cache due to event network-changed-f04b9ac5-a0ac-4535-944c-01e2d59d6db4. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 614.157363] env[62383]: DEBUG oslo_concurrency.lockutils [req-bb7904f8-191b-411b-b6ee-0f7f50f8102c req-6c216f3d-280e-4a90-a275-b4a7796fbd9a service nova] Acquiring lock "refresh_cache-13db2c17-ccba-4336-929a-0d01202c5143" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.157363] env[62383]: DEBUG oslo_concurrency.lockutils [req-bb7904f8-191b-411b-b6ee-0f7f50f8102c req-6c216f3d-280e-4a90-a275-b4a7796fbd9a service nova] Acquired lock "refresh_cache-13db2c17-ccba-4336-929a-0d01202c5143" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.157363] env[62383]: DEBUG nova.network.neutron [req-bb7904f8-191b-411b-b6ee-0f7f50f8102c req-6c216f3d-280e-4a90-a275-b4a7796fbd9a service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Refreshing network info cache for port f04b9ac5-a0ac-4535-944c-01e2d59d6db4 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 614.163328] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451139, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.173319] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451138, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.335083] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451137, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.547803] env[62383]: DEBUG oslo_vmware.api [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451134, 'name': PowerOnVM_Task, 'duration_secs': 0.778142} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.548227] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 614.548568] env[62383]: DEBUG nova.compute.manager [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 614.549643] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3d5ab6-f241-4a94-a635-41244e27dd77 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.660709] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451139, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.669770] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451138, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082522} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.670158] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 614.671348] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b421b49-0d75-4b2d-b8a3-e144c804dcf1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.697752] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] f28beb17-8455-49d3-8be0-7636b9abe4e8/f28beb17-8455-49d3-8be0-7636b9abe4e8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 614.701242] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0a88b83-15d7-4362-83aa-202ddc0f54cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.716445] env[62383]: DEBUG nova.compute.manager [req-be3b9dff-dd11-4a9e-8536-a5aaca48743c req-895ccdc8-51c7-41c7-b8b4-472106be0c34 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Received event network-vif-deleted-a993c45d-5e8d-4cd7-b51a-991b816fa089 {{(pid=62383) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11648}} [ 614.716661] env[62383]: INFO nova.compute.manager [req-be3b9dff-dd11-4a9e-8536-a5aaca48743c req-895ccdc8-51c7-41c7-b8b4-472106be0c34 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Neutron deleted interface a993c45d-5e8d-4cd7-b51a-991b816fa089; detaching it from the instance and deleting it from the info cache [ 614.717159] env[62383]: DEBUG nova.network.neutron [req-be3b9dff-dd11-4a9e-8536-a5aaca48743c req-895ccdc8-51c7-41c7-b8b4-472106be0c34 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.727620] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for the task: (returnval){ [ 614.727620] env[62383]: value = "task-2451140" [ 614.727620] env[62383]: _type = "Task" [ 614.727620] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.736762] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451140, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.839125] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451137, 'name': CreateVM_Task, 'duration_secs': 0.530627} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.842141] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 614.843083] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.843083] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.843386] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 614.843977] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff41bc7b-a922-4ffe-9d25-9bbc4eae042d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.849012] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 
tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 614.849012] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52042a39-4c5e-7e0c-799f-cd69040bdee9" [ 614.849012] env[62383]: _type = "Task" [ 614.849012] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.858406] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52042a39-4c5e-7e0c-799f-cd69040bdee9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.919595] env[62383]: DEBUG nova.network.neutron [req-bb7904f8-191b-411b-b6ee-0f7f50f8102c req-6c216f3d-280e-4a90-a275-b4a7796fbd9a service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Updated VIF entry in instance network info cache for port f04b9ac5-a0ac-4535-944c-01e2d59d6db4. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 614.920077] env[62383]: DEBUG nova.network.neutron [req-bb7904f8-191b-411b-b6ee-0f7f50f8102c req-6c216f3d-280e-4a90-a275-b4a7796fbd9a service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Updating instance_info_cache with network_info: [{"id": "f04b9ac5-a0ac-4535-944c-01e2d59d6db4", "address": "fa:16:3e:3a:38:9f", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf04b9ac5-a0", "ovs_interfaceid": "f04b9ac5-a0ac-4535-944c-01e2d59d6db4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.989148] env[62383]: DEBUG nova.network.neutron [-] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.077040] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.087125] env[62383]: DEBUG nova.compute.manager [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 
tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 615.110983] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 615.111291] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 615.111452] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 615.111675] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 615.111839] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 615.111994] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 615.112278] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 615.112470] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 615.112660] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 615.113011] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 615.113011] env[62383]: DEBUG nova.virt.hardware [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 615.113829] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61d6922-0cc6-4132-af1b-52985968665e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.122344] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5747bd7c-4ca0-4019-a3ff-e529fd8f9772 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.150884] env[62383]: DEBUG oslo_vmware.api [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451139, 'name': PowerOnVM_Task, 'duration_secs': 0.659785} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.151196] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 615.151196] env[62383]: INFO nova.compute.manager [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Took 10.54 seconds to spawn the instance on the hypervisor. 
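[editor's note] The nova.virt.hardware entries above walk through CPU-topology selection for the 1-vCPU m1.nano flavor: enumerate every (sockets, cores, threads) split of the vCPU count within the 65536-per-dimension limits, then sort by preference; here only 1:1:1 is possible. The sketch below illustrates that enumeration step under those assumptions; it is not Nova's code and omits the preference/sorting logic.

```python
# Illustrative sketch only -- not nova.virt.hardware. Mirrors the enumeration
# behind "Build topologies for 1 vcpu(s) 1:1:1" / "Got 1 possible topologies":
# every (sockets, cores, threads) whose product equals the vCPU count, capped
# by the per-dimension limits (65536 each in the log above).
from dataclasses import dataclass


@dataclass(frozen=True)
class CpuTopology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus: int,
                        max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> list[CpuTopology]:
    """Enumerate all factorizations of `vcpus` into sockets * cores * threads."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        remaining = vcpus // sockets
        for cores in range(1, min(remaining, max_cores) + 1):
            if remaining % cores:
                continue
            threads = remaining // cores
            if threads <= max_threads:
                found.append(CpuTopology(sockets, cores, threads))
    return found


# For the 1-vCPU m1.nano flavor above this yields exactly one topology, 1:1:1.
print(possible_topologies(1))  # [CpuTopology(sockets=1, cores=1, threads=1)]
```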
[ 615.151491] env[62383]: DEBUG nova.compute.manager [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 615.152211] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88531dd-0721-49c9-bde0-8fa64467439c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.222879] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-821491e8-2689-4529-9774-8f10110fe694 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.238046] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451140, 'name': ReconfigVM_Task, 'duration_secs': 0.291557} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.240071] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062b44a5-9136-4041-b0d9-86e464e752d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.250580] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Reconfigured VM instance instance-00000017 to attach disk [datastore2] f28beb17-8455-49d3-8be0-7636b9abe4e8/f28beb17-8455-49d3-8be0-7636b9abe4e8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 615.255546] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e3d820e-cda9-4193-8068-a9caaf6bd2d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.264607] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for the task: (returnval){ [ 615.264607] env[62383]: value = "task-2451141" [ 615.264607] env[62383]: _type = "Task" [ 615.264607] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.282166] env[62383]: DEBUG nova.compute.manager [req-be3b9dff-dd11-4a9e-8536-a5aaca48743c req-895ccdc8-51c7-41c7-b8b4-472106be0c34 service nova] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Detach interface failed, port_id=a993c45d-5e8d-4cd7-b51a-991b816fa089, reason: Instance 184d0caa-85c2-426d-82e5-ac52e525fe74 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 615.292582] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451141, 'name': Rename_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.361486] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52042a39-4c5e-7e0c-799f-cd69040bdee9, 'name': SearchDatastore_Task, 'duration_secs': 0.019796} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.364870] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.365167] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 615.365439] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.365745] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.366066] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 615.366385] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c28a6b2-7a3b-465c-b13c-0500c2446732 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.380490] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 615.380689] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 615.381437] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f73526d5-319d-4dfb-812f-b5e4232709d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.387630] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 615.387630] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]529bb3ff-30c8-29c3-19f2-d476b2868e7d" [ 615.387630] env[62383]: _type = "Task" [ 615.387630] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.398683] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529bb3ff-30c8-29c3-19f2-d476b2868e7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.422750] env[62383]: DEBUG oslo_concurrency.lockutils [req-bb7904f8-191b-411b-b6ee-0f7f50f8102c req-6c216f3d-280e-4a90-a275-b4a7796fbd9a service nova] Releasing lock "refresh_cache-13db2c17-ccba-4336-929a-0d01202c5143" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.493025] env[62383]: INFO nova.compute.manager [-] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Took 1.42 seconds to deallocate network for instance. [ 615.674121] env[62383]: INFO nova.compute.manager [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Took 36.25 seconds to build instance. [ 615.745032] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22fe66a2-5e33-41bd-bc38-afe3aeee528b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.755364] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31c6386-3492-497d-80da-4d9fef33b7e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.800123] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6c48f1-7c2b-43e2-97a1-b681475dab58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.808629] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451141, 'name': Rename_Task, 'duration_secs': 0.154037} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.811929] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 615.812466] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-091a0a46-2ac5-45d8-90b9-f5743a61c22d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.816955] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedbec94-a238-4604-abca-7e088a38a663 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.832122] env[62383]: DEBUG nova.compute.provider_tree [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.834552] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for the task: (returnval){ [ 615.834552] env[62383]: value = "task-2451142" [ 615.834552] env[62383]: _type = "Task" [ 615.834552] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.843750] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451142, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.900334] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529bb3ff-30c8-29c3-19f2-d476b2868e7d, 'name': SearchDatastore_Task, 'duration_secs': 0.025944} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.901134] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10e90e36-c2c5-42d0-8665-eb4f4474bff7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.906057] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 615.906057] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525bc97e-8197-fc1a-9dec-cc50043a7359" [ 615.906057] env[62383]: _type = "Task" [ 615.906057] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.916861] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525bc97e-8197-fc1a-9dec-cc50043a7359, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.999615] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.111215] env[62383]: DEBUG nova.network.neutron [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Successfully updated port: 2dd94f31-46c1-4662-9f19-e6f69a3decf8 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 616.178754] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc9923ba-dff6-4d98-99fe-b88e142e638a tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.222s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.304265] env[62383]: INFO nova.compute.manager [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Rebuilding instance [ 616.332369] env[62383]: DEBUG nova.compute.manager [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Received event network-vif-plugged-2dd94f31-46c1-4662-9f19-e6f69a3decf8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 616.332369] env[62383]: DEBUG oslo_concurrency.lockutils [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] Acquiring lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.332369] env[62383]: DEBUG oslo_concurrency.lockutils [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] Lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.332369] env[62383]: DEBUG oslo_concurrency.lockutils [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] Lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.332369] env[62383]: DEBUG nova.compute.manager [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] No waiting events found dispatching network-vif-plugged-2dd94f31-46c1-4662-9f19-e6f69a3decf8 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 616.332602] env[62383]: WARNING nova.compute.manager [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Received unexpected event network-vif-plugged-2dd94f31-46c1-4662-9f19-e6f69a3decf8 for instance with vm_state building and task_state spawning. [ 616.332602] env[62383]: DEBUG nova.compute.manager [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Received event network-changed-2dd94f31-46c1-4662-9f19-e6f69a3decf8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 616.332602] env[62383]: DEBUG nova.compute.manager [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Refreshing instance network info cache due to event network-changed-2dd94f31-46c1-4662-9f19-e6f69a3decf8. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 616.332793] env[62383]: DEBUG oslo_concurrency.lockutils [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] Acquiring lock "refresh_cache-2337e9a2-736c-4d58-ac2e-04c8ad813be4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.332925] env[62383]: DEBUG oslo_concurrency.lockutils [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] Acquired lock "refresh_cache-2337e9a2-736c-4d58-ac2e-04c8ad813be4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.333100] env[62383]: DEBUG nova.network.neutron [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Refreshing network info cache for port 2dd94f31-46c1-4662-9f19-e6f69a3decf8 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 616.341662] env[62383]: DEBUG nova.scheduler.client.report [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 616.369134] env[62383]: DEBUG oslo_vmware.api [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451142, 'name': PowerOnVM_Task, 
'duration_secs': 0.467834} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.369134] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 616.369134] env[62383]: INFO nova.compute.manager [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Took 8.89 seconds to spawn the instance on the hypervisor. [ 616.369134] env[62383]: DEBUG nova.compute.manager [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 616.369134] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd248918-a55e-43d5-ba21-fd553ae3a867 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.387791] env[62383]: DEBUG nova.compute.manager [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 616.389449] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7811231-770c-44af-b9df-ff7f4df2ea3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.416709] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525bc97e-8197-fc1a-9dec-cc50043a7359, 'name': SearchDatastore_Task, 'duration_secs': 0.018032} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.416869] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.417142] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 13db2c17-ccba-4336-929a-0d01202c5143/13db2c17-ccba-4336-929a-0d01202c5143.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 616.417439] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99e59d1f-ea69-4763-81c6-0dac807a5eca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.423714] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 616.423714] env[62383]: value = "task-2451143" [ 616.423714] env[62383]: _type = "Task" [ 616.423714] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.433718] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451143, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.617984] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "refresh_cache-2337e9a2-736c-4d58-ac2e-04c8ad813be4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.686884] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.850670] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.851271] env[62383]: DEBUG nova.compute.manager [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 616.859041] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.861s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.859041] env[62383]: INFO nova.compute.claims [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.889022] env[62383]: INFO nova.compute.manager [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Took 37.29 seconds to build instance. [ 616.935448] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451143, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.128271] env[62383]: DEBUG nova.network.neutron [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.215436] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.277440] env[62383]: DEBUG nova.network.neutron [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.358204] env[62383]: DEBUG nova.compute.utils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 617.359921] env[62383]: DEBUG nova.compute.manager [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 617.360112] env[62383]: DEBUG nova.network.neutron [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 617.393024] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dd71b2ed-aacf-4b6e-988b-437255f5b0d8 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lock "f28beb17-8455-49d3-8be0-7636b9abe4e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.955s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.403703] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 617.404211] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e88eab1a-4bd8-4de6-ad10-4031eb804c9e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.417352] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for the task: (returnval){ [ 617.417352] env[62383]: value = "task-2451144" [ 617.417352] env[62383]: _type = "Task" [ 617.417352] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.432221] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451144, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.433472] env[62383]: DEBUG nova.policy [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a65194086c047f992d06ac9f1aa864a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66211918170f4869be521cd36e9248b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 617.439634] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644659} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.439885] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 13db2c17-ccba-4336-929a-0d01202c5143/13db2c17-ccba-4336-929a-0d01202c5143.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 617.440113] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 617.440359] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17d16d78-05ef-4f62-8725-c9791399fcbe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.448708] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 617.448708] env[62383]: value = "task-2451145" [ 617.448708] env[62383]: _type = "Task" [ 617.448708] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.461238] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451145, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.780658] env[62383]: DEBUG oslo_concurrency.lockutils [req-e6f10b44-8398-406c-8c22-7df2de445964 req-bfb79c01-bc0d-4779-9bbb-f51d01a1e96a service nova] Releasing lock "refresh_cache-2337e9a2-736c-4d58-ac2e-04c8ad813be4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.782645] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "refresh_cache-2337e9a2-736c-4d58-ac2e-04c8ad813be4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.782645] env[62383]: DEBUG nova.network.neutron [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.864427] env[62383]: DEBUG nova.compute.manager [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 617.898520] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 617.927747] env[62383]: DEBUG nova.compute.manager [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 617.929243] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b140a666-a798-4bcc-94ca-74e6d7f8cfa7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.934882] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451144, 'name': PowerOffVM_Task, 'duration_secs': 0.121699} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.935461] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 617.935682] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 617.936416] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc00f619-b0be-45f9-a13e-e6e18e829d78 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.950557] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 617.953970] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92478de9-26bf-402a-9d9b-c3cecf4cfb38 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.960746] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451145, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076846} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.960871] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 617.961593] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d175ef-9f80-48bb-ae6b-8441785c7dd6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.993256] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] 13db2c17-ccba-4336-929a-0d01202c5143/13db2c17-ccba-4336-929a-0d01202c5143.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 617.996456] env[62383]: DEBUG nova.network.neutron [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Successfully created port: b44ff87a-66f9-4720-9a57-b485496554c7 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 617.998243] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3367b60-aa09-4352-aedf-7e005d81a0fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.014946] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 618.014946] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 618.014946] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Deleting the datastore file [datastore2] 69569fa0-5175-453e-9875-9ef46c723da8 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 618.016587] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9ddeed7-a45f-4d7a-a2ec-810cfa77a1de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.022763] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for the task: (returnval){ [ 618.022763] env[62383]: value = "task-2451148" [ 618.022763] env[62383]: _type = 
"Task" [ 618.022763] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.024260] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 618.024260] env[62383]: value = "task-2451147" [ 618.024260] env[62383]: _type = "Task" [ 618.024260] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.037226] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.040627] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451147, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.161168] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.161598] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.326637] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquiring lock "583138d1-f928-4e33-a443-11c627203c44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.326957] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "583138d1-f928-4e33-a443-11c627203c44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.431379] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.453934] env[62383]: INFO nova.compute.manager [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] instance snapshotting [ 
618.456277] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcab4cca-59fc-475c-a24d-76ce3d32cb94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.477619] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78af478a-8c29-4c2f-83e2-1ac91623a21a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.537246] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132744} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.542431] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 618.542633] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 618.542813] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 618.545198] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451147, 'name': ReconfigVM_Task, 'duration_secs': 0.341814} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.546887] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Reconfigured VM instance instance-00000018 to attach disk [datastore2] 13db2c17-ccba-4336-929a-0d01202c5143/13db2c17-ccba-4336-929a-0d01202c5143.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 618.546887] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-482c152c-46d9-4d9e-9549-2a8de1f1c969 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.555406] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 618.555406] env[62383]: value = "task-2451149" [ 618.555406] env[62383]: _type = "Task" [ 618.555406] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.564161] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451149, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.581682] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-666a3b57-f9d5-415f-94e7-977684723cce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.585650] env[62383]: DEBUG nova.network.neutron [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.592405] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8111bd2-d936-4167-a181-87766d42a0ef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.625199] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ee996e-5670-4b1a-b614-b02a5b715015 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.633085] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a850f3-ae01-45a3-95f9-c346278cf987 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.648310] env[62383]: DEBUG nova.compute.provider_tree [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.671906] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.671998] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 618.672942] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Rebuilding the list of instances to heal {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 618.821495] env[62383]: DEBUG nova.compute.manager [req-871bcd0b-d98c-41e5-a1ed-44dd5ac3af23 req-39f1e2e5-e3a0-4c2d-add4-23d6b25660ba service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Received event network-changed-99fff832-18f0-4caa-85b2-428c5e2852a9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 618.821683] env[62383]: DEBUG nova.compute.manager [req-871bcd0b-d98c-41e5-a1ed-44dd5ac3af23 req-39f1e2e5-e3a0-4c2d-add4-23d6b25660ba 
service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Refreshing instance network info cache due to event network-changed-99fff832-18f0-4caa-85b2-428c5e2852a9. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 618.822087] env[62383]: DEBUG oslo_concurrency.lockutils [req-871bcd0b-d98c-41e5-a1ed-44dd5ac3af23 req-39f1e2e5-e3a0-4c2d-add4-23d6b25660ba service nova] Acquiring lock "refresh_cache-f28beb17-8455-49d3-8be0-7636b9abe4e8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.822322] env[62383]: DEBUG oslo_concurrency.lockutils [req-871bcd0b-d98c-41e5-a1ed-44dd5ac3af23 req-39f1e2e5-e3a0-4c2d-add4-23d6b25660ba service nova] Acquired lock "refresh_cache-f28beb17-8455-49d3-8be0-7636b9abe4e8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.822525] env[62383]: DEBUG nova.network.neutron [req-871bcd0b-d98c-41e5-a1ed-44dd5ac3af23 req-39f1e2e5-e3a0-4c2d-add4-23d6b25660ba service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Refreshing network info cache for port 99fff832-18f0-4caa-85b2-428c5e2852a9 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 618.832087] env[62383]: DEBUG nova.network.neutron [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Updating instance_info_cache with network_info: [{"id": "2dd94f31-46c1-4662-9f19-e6f69a3decf8", "address": "fa:16:3e:86:7e:c4", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd94f31-46", "ovs_interfaceid": "2dd94f31-46c1-4662-9f19-e6f69a3decf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.891953] env[62383]: DEBUG nova.compute.manager [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 618.927631] env[62383]: DEBUG nova.virt.hardware [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 618.927883] env[62383]: DEBUG nova.virt.hardware [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.928064] env[62383]: DEBUG nova.virt.hardware [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 618.928257] env[62383]: DEBUG nova.virt.hardware [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.928836] env[62383]: DEBUG nova.virt.hardware [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 618.929033] env[62383]: DEBUG nova.virt.hardware [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 618.929259] env[62383]: DEBUG nova.virt.hardware [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 618.929426] env[62383]: DEBUG nova.virt.hardware [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 618.929598] env[62383]: DEBUG nova.virt.hardware [None 
req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 618.929760] env[62383]: DEBUG nova.virt.hardware [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 618.930450] env[62383]: DEBUG nova.virt.hardware [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 618.930839] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8934e791-4608-4b3a-942d-cbe0be8efd88 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.940153] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b82191f-7f58-472c-8866-06d23f313b3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.990849] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 618.990849] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2fdb0b74-a1a6-4c6f-a26c-772a70c046bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.997617] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 618.997617] env[62383]: value = "task-2451150" [ 618.997617] env[62383]: _type = "Task" [ 618.997617] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.007292] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451150, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.067232] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451149, 'name': Rename_Task, 'duration_secs': 0.141218} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.067352] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 619.067607] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca072d7b-9374-44bf-9c07-faeccad5807a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.074149] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 619.074149] env[62383]: value = "task-2451151" [ 619.074149] env[62383]: _type = "Task" [ 619.074149] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.083114] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451151, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.151569] env[62383]: DEBUG nova.scheduler.client.report [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 619.178321] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Skipping network cache update for instance because it is Building. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 619.178321] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Skipping network cache update for instance because it is Building. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 619.178321] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Skipping network cache update for instance because it is Building. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 619.178598] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Skipping network cache update for instance because it is Building. 
{{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 619.210569] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "refresh_cache-8e911bad-5408-4588-9865-912ce4457d34" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.210569] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquired lock "refresh_cache-8e911bad-5408-4588-9865-912ce4457d34" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.210569] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Forcefully refreshing network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 619.210569] env[62383]: DEBUG nova.objects.instance [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lazy-loading 'info_cache' on Instance uuid 8e911bad-5408-4588-9865-912ce4457d34 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 619.334888] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "refresh_cache-2337e9a2-736c-4d58-ac2e-04c8ad813be4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.335316] env[62383]: DEBUG nova.compute.manager [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Instance network_info: |[{"id": "2dd94f31-46c1-4662-9f19-e6f69a3decf8", "address": "fa:16:3e:86:7e:c4", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd94f31-46", "ovs_interfaceid": "2dd94f31-46c1-4662-9f19-e6f69a3decf8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 619.335805] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:7e:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2dd94f31-46c1-4662-9f19-e6f69a3decf8', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 619.344018] env[62383]: DEBUG oslo.service.loopingcall [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 619.344285] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 619.344889] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a8eb1ba-f8e8-4ba8-8111-f9bc98d5d402 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.371143] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.371143] env[62383]: value = "task-2451152" [ 619.371143] env[62383]: _type = "Task" [ 619.371143] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.381877] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451152, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.508574] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451150, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.557336] env[62383]: DEBUG nova.network.neutron [req-871bcd0b-d98c-41e5-a1ed-44dd5ac3af23 req-39f1e2e5-e3a0-4c2d-add4-23d6b25660ba service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Updated VIF entry in instance network info cache for port 99fff832-18f0-4caa-85b2-428c5e2852a9. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 619.557678] env[62383]: DEBUG nova.network.neutron [req-871bcd0b-d98c-41e5-a1ed-44dd5ac3af23 req-39f1e2e5-e3a0-4c2d-add4-23d6b25660ba service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Updating instance_info_cache with network_info: [{"id": "99fff832-18f0-4caa-85b2-428c5e2852a9", "address": "fa:16:3e:5c:ab:f2", "network": {"id": "7b95a38b-72a6-4f25-acb6-d5bb13e71c04", "bridge": "br-int", "label": "tempest-ServersTestJSON-1830616172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbff513aaffd4e61a7607c7655cecfcd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8800a981-a89e-42e4-8be9-cace419ba9cb", "external-id": "nsx-vlan-transportzone-962", "segmentation_id": 962, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99fff832-18", "ovs_interfaceid": "99fff832-18f0-4caa-85b2-428c5e2852a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.582019] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 619.582372] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.582592] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 619.582904] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Flavor pref 0:0:0 
{{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.583135] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 619.583470] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 619.583770] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 619.583993] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 619.584250] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 619.584472] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 619.584720] env[62383]: DEBUG nova.virt.hardware [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 619.586060] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f13f82-eed5-461f-a5ae-a52890c642d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.597422] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae813f15-be52-495e-a489-83531b5e8b57 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.602489] env[62383]: DEBUG oslo_vmware.api [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451151, 'name': PowerOnVM_Task, 'duration_secs': 0.460255} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.602780] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 619.603020] env[62383]: INFO nova.compute.manager [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Took 9.37 seconds to spawn the instance on the hypervisor. [ 619.603236] env[62383]: DEBUG nova.compute.manager [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 619.604501] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632afb7a-6981-4e5e-a8ac-89acb9bf0168 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.615107] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 619.621350] env[62383]: DEBUG oslo.service.loopingcall [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 619.622391] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 619.622644] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81913663-94e7-42f9-aa3a-26dbb7e624e1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.647634] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.647634] env[62383]: value = "task-2451153" [ 619.647634] env[62383]: _type = "Task" [ 619.647634] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.656702] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451153, 'name': CreateVM_Task} progress is 0%. 
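The PowerOnVM_Task and CreateVM_Task records above follow oslo.vmware's wait-for-task pattern: a vCenter task is started, then polled until it reports success, with the log recording progress percentages and a final duration_secs. A rough, self-contained sketch of that pattern, assuming a caller-supplied get_task_info() accessor (the names below are placeholders, not the oslo.vmware or vSphere API):

import time

class TaskFailedError(Exception):
    """Raised when the polled task ends in an error state."""

def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    # Poll the task until it finishes, mirroring the "progress is N%" /
    # "completed successfully" records in the log above.
    while True:
        info = get_task_info(task_id)   # e.g. {'state': 'running', 'progress': 25}
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise TaskFailedError(info.get('error', 'task failed'))
        time.sleep(poll_interval)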
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.657578] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.801s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.658171] env[62383]: DEBUG nova.compute.manager [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 619.660768] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.344s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.661000] env[62383]: DEBUG nova.objects.instance [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lazy-loading 'resources' on Instance uuid a170fd95-3f7f-4315-a063-b9d02a7a1af4 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 619.792438] env[62383]: DEBUG nova.compute.manager [req-94c5ac90-0e4c-4d20-8ee5-2df1bd14551b req-a141de82-32d7-4694-992c-0e0282e66cab service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Received event network-vif-plugged-b44ff87a-66f9-4720-9a57-b485496554c7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 619.792438] env[62383]: DEBUG oslo_concurrency.lockutils [req-94c5ac90-0e4c-4d20-8ee5-2df1bd14551b req-a141de82-32d7-4694-992c-0e0282e66cab service nova] Acquiring lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.792726] env[62383]: DEBUG oslo_concurrency.lockutils [req-94c5ac90-0e4c-4d20-8ee5-2df1bd14551b req-a141de82-32d7-4694-992c-0e0282e66cab service nova] Lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.794087] env[62383]: DEBUG oslo_concurrency.lockutils [req-94c5ac90-0e4c-4d20-8ee5-2df1bd14551b req-a141de82-32d7-4694-992c-0e0282e66cab service nova] Lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.794087] env[62383]: DEBUG nova.compute.manager [req-94c5ac90-0e4c-4d20-8ee5-2df1bd14551b req-a141de82-32d7-4694-992c-0e0282e66cab service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] No waiting events found dispatching
network-vif-plugged-b44ff87a-66f9-4720-9a57-b485496554c7 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 619.794087] env[62383]: WARNING nova.compute.manager [req-94c5ac90-0e4c-4d20-8ee5-2df1bd14551b req-a141de82-32d7-4694-992c-0e0282e66cab service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Received unexpected event network-vif-plugged-b44ff87a-66f9-4720-9a57-b485496554c7 for instance with vm_state building and task_state spawning. [ 619.882104] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451152, 'name': CreateVM_Task, 'duration_secs': 0.408364} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.882696] env[62383]: DEBUG nova.network.neutron [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Successfully updated port: b44ff87a-66f9-4720-9a57-b485496554c7 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 619.883554] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 619.884369] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.886132] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.886132] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 619.886132] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79cbcd26-8c3e-4ccc-b9c8-171188f9a177 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.889707] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 619.889707] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520ac379-9a4d-cce0-b742-99ad7e21cb3b" [ 619.889707] env[62383]: _type = "Task" [ 619.889707] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.898592] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520ac379-9a4d-cce0-b742-99ad7e21cb3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.009019] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451150, 'name': CreateSnapshot_Task, 'duration_secs': 0.68197} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.009176] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 620.009924] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99661b7d-b408-42d6-b9eb-57eaac1d1b5f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.059933] env[62383]: DEBUG oslo_concurrency.lockutils [req-871bcd0b-d98c-41e5-a1ed-44dd5ac3af23 req-39f1e2e5-e3a0-4c2d-add4-23d6b25660ba service nova] Releasing lock "refresh_cache-f28beb17-8455-49d3-8be0-7636b9abe4e8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.063914] env[62383]: DEBUG nova.compute.manager [None req-14975d24-e3e3-4457-be2d-0010091d23d7 tempest-ServerDiagnosticsV248Test-252928681 tempest-ServerDiagnosticsV248Test-252928681-project-admin] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 620.064993] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8142687-9fd0-4cc5-9bf7-fb1b31c0e3b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.071607] env[62383]: INFO nova.compute.manager [None req-14975d24-e3e3-4457-be2d-0010091d23d7 tempest-ServerDiagnosticsV248Test-252928681 tempest-ServerDiagnosticsV248Test-252928681-project-admin] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Retrieving diagnostics [ 620.072315] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e54cc8a-0ee2-4f4d-ac99-de77e50dafd1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.153507] env[62383]: INFO nova.compute.manager [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Took 37.79 seconds to build instance. [ 620.159812] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451153, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.164140] env[62383]: DEBUG nova.compute.utils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 620.168079] env[62383]: DEBUG nova.compute.manager [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 620.168169] env[62383]: DEBUG nova.network.neutron [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 620.237921] env[62383]: DEBUG nova.policy [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f00da7582832443e9052a6cb34ddc2ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0319810679c349b89d4129e7964d2a72', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 620.385042] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquiring lock "refresh_cache-e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.385299] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquired lock "refresh_cache-e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.385391] env[62383]: DEBUG nova.network.neutron [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 620.404664] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520ac379-9a4d-cce0-b742-99ad7e21cb3b, 'name': SearchDatastore_Task, 'duration_secs': 0.011724} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.407370] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.407603] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 620.408278] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.408278] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.408278] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 620.408750] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4cdc011-01d5-40d0-99db-f49acd8f35b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.423554] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 620.423750] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 620.424540] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc787389-e302-4119-a091-3637ca61dfca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.439662] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 620.439662] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52860edb-e22a-b556-6899-a708e4d8d510" [ 620.439662] env[62383]: _type = "Task" [ 620.439662] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.448548] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52860edb-e22a-b556-6899-a708e4d8d510, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.529978] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 620.532684] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-770b3c9c-2102-4931-a128-d2536304c260 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.542919] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 620.542919] env[62383]: value = "task-2451154" [ 620.542919] env[62383]: _type = "Task" [ 620.542919] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.546436] env[62383]: DEBUG nova.network.neutron [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Successfully created port: f2203dee-5264-47c9-93de-7653ae3131a8 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 620.555415] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451154, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.658438] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4594ca96-0985-4623-a27d-ab1c63b444e9 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "13db2c17-ccba-4336-929a-0d01202c5143" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 48.284s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 620.658694] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451153, 'name': CreateVM_Task, 'duration_secs': 0.655041} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.659882] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 620.664125] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.664280] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.664603] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 620.665318] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d70dfc6-9bc8-404b-bce4-b59fe46f41d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.670240] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for the task: (returnval){ [ 620.670240] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5213e006-67f0-0768-dfca-f07c91bcc34f" [ 620.670240] env[62383]: _type = "Task" [ 620.670240] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.670953] env[62383]: DEBUG nova.compute.manager [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Start building block device mappings for instance.
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 620.683331] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5213e006-67f0-0768-dfca-f07c91bcc34f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.721321] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f9a768-5dfc-4694-982c-294b4eb50111 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.729088] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27ee398-f4c9-490e-bb2e-f733973d673e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.761933] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e47a9f-fd76-4531-867a-b2601e092fdc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.770491] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2a05c0-3a19-4d0d-b8b0-e3824f0416d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.786348] env[62383]: DEBUG nova.compute.provider_tree [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.938702] env[62383]: DEBUG nova.network.neutron [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.955360] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52860edb-e22a-b556-6899-a708e4d8d510, 'name': SearchDatastore_Task, 'duration_secs': 0.037167} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.955858] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5bb53dc-6556-4d6e-8567-5a7b0886ef99 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.961440] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 620.961440] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]521cb654-06c7-78d1-787a-2853cc811fa1" [ 620.961440] env[62383]: _type = "Task" [ 620.961440] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.969794] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521cb654-06c7-78d1-787a-2853cc811fa1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.007541] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Updating instance_info_cache with network_info: [{"id": "f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e", "address": "fa:16:3e:d1:3e:92", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6fa0d52-c6", "ovs_interfaceid": "f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.053570] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451154, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.117032] env[62383]: DEBUG nova.network.neutron [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Updating instance_info_cache with network_info: [{"id": "b44ff87a-66f9-4720-9a57-b485496554c7", "address": "fa:16:3e:ea:36:d2", "network": {"id": "9339d260-1e91-48a5-b2d0-9bbe4d97758e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1896053422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66211918170f4869be521cd36e9248b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb44ff87a-66", "ovs_interfaceid": "b44ff87a-66f9-4720-9a57-b485496554c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.166469] env[62383]: DEBUG nova.compute.manager [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 621.192465] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5213e006-67f0-0768-dfca-f07c91bcc34f, 'name': SearchDatastore_Task, 'duration_secs': 0.01121} completed successfully. 
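The instance_info_cache updates above carry the full serialized network_info for each VIF (port id, MAC, subnets, fixed and floating IPs, OVS binding details). A small illustrative helper for pulling the addresses out of one such entry, assuming only the dictionary shape shown in the log (Nova itself wraps this data in its own network model objects rather than handling raw dicts like this):

def summarize_vif(vif):
    # Collect fixed and floating IPs from a network_info entry shaped like
    # the ones logged above: network -> subnets -> ips -> floating_ips.
    fixed_ips, floating_ips = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed_ips.append(ip["address"])
            floating_ips.extend(f["address"] for f in ip.get("floating_ips", []))
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "devname": vif.get("devname"),
        "fixed_ips": fixed_ips,
        "floating_ips": floating_ips,
    }

# Applied to the e51a0dd7 entry above, this would report port
# b44ff87a-66f9-4720-9a57-b485496554c7 with fixed IP 192.168.128.4 and no
# floating IPs.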
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.192856] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.192984] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 621.193222] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.289204] env[62383]: DEBUG nova.scheduler.client.report [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 621.412310] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquiring lock "0d992155-24fa-4836-83c9-8f188f7d7efa" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.412596] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lock "0d992155-24fa-4836-83c9-8f188f7d7efa" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.412947] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquiring lock "0d992155-24fa-4836-83c9-8f188f7d7efa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.413030] env[62383]: DEBUG oslo_concurrency.lockutils [None
req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lock "0d992155-24fa-4836-83c9-8f188f7d7efa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.413206] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lock "0d992155-24fa-4836-83c9-8f188f7d7efa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.415850] env[62383]: INFO nova.compute.manager [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Terminating instance [ 621.473853] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521cb654-06c7-78d1-787a-2853cc811fa1, 'name': SearchDatastore_Task, 'duration_secs': 0.012058} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.473853] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.473853] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 2337e9a2-736c-4d58-ac2e-04c8ad813be4/2337e9a2-736c-4d58-ac2e-04c8ad813be4.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 621.474172] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.474172] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 621.475023] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbde6321-8267-4cb7-83cf-f3f93b12c9fe {{(pid=62383)
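The Acquiring/acquired/"released" records throughout this section come from oslo.concurrency's lockutils, which also times how long a caller waited for a lock and how long it was held. A minimal usage sketch of the two patterns visible here, the synchronized decorator and the lock() context manager (the function bodies and lock names below are invented placeholders, not Nova code):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Serialized section: lockutils logs which function acquired the lock,
    # how long it waited, and how long it held it.
    pass

def refresh_cache(instance_uuid):
    # Context-manager form, comparable to the refresh_cache locks above.
    with lockutils.lock('refresh_cache-' + instance_uuid):
        pass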
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.476464] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65cf53ed-6c11-4f34-966c-e33cf7daf048 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.482525] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 621.482525] env[62383]: value = "task-2451155" [ 621.482525] env[62383]: _type = "Task" [ 621.482525] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.486364] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 621.486565] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 621.487539] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ade2081c-e75e-4693-a9b5-1e82b5d33768 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.493109] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451155, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.495949] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for the task: (returnval){ [ 621.495949] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c94833-9526-e8cc-2d70-d154ecf0da9d" [ 621.495949] env[62383]: _type = "Task" [ 621.495949] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.503665] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c94833-9526-e8cc-2d70-d154ecf0da9d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.510248] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Releasing lock "refresh_cache-8e911bad-5408-4588-9865-912ce4457d34" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.510466] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Updated the network info_cache for instance {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 621.510677] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.510948] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.511179] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.511391] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.511596] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.511828] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.511930] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 621.512120] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.557325] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451154, 'name': CloneVM_Task} progress is 94%. 
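The run of "Running periodic task ComputeManager._poll_*" records above is oslo.service's periodic-task machinery walking a manager's registered tasks. A small self-contained sketch of how such tasks are declared (illustrative only; the class name and spacing value are made up, and Nova's real manager wires this into its service loop):

from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class DemoManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _poll_unconfirmed_resizes(self, context):
        # Invoked roughly every 60 seconds once the service loop calls
        # run_periodic_tasks(context) on this manager.
        pass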
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.619237] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Releasing lock "refresh_cache-e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.619671] env[62383]: DEBUG nova.compute.manager [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Instance network_info: |[{"id": "b44ff87a-66f9-4720-9a57-b485496554c7", "address": "fa:16:3e:ea:36:d2", "network": {"id": "9339d260-1e91-48a5-b2d0-9bbe4d97758e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1896053422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66211918170f4869be521cd36e9248b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb44ff87a-66", "ovs_interfaceid": "b44ff87a-66f9-4720-9a57-b485496554c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 621.620434] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:36:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b44ff87a-66f9-4720-9a57-b485496554c7', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 621.629132] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Creating folder: Project (66211918170f4869be521cd36e9248b8). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.629132] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1c204d3-e395-4d76-aec8-14ff01a80468 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.640347] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Created folder: Project (66211918170f4869be521cd36e9248b8) in parent group-v496304. [ 621.640563] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Creating folder: Instances. Parent ref: group-v496374. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 621.640808] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bd83bf5-bfc7-47a6-ab24-1fc9e5e22439 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.650442] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Created folder: Instances in parent group-v496374. [ 621.650680] env[62383]: DEBUG oslo.service.loopingcall [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 621.650863] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 621.651066] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c482cbea-d081-4d84-8aa2-2834e2e805f6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.670815] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 621.670815] env[62383]: value = "task-2451158" [ 621.670815] env[62383]: _type = "Task" [ 621.670815] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.681501] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451158, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.688319] env[62383]: DEBUG nova.compute.manager [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 621.699338] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.716027] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 621.716027] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.716027] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 621.716239] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.716239] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 621.716239] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 621.716639] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 621.716983] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 621.717276] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 621.717547] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 621.717823] env[62383]: DEBUG nova.virt.hardware [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 621.718831] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27692b70-a3f5-4b2d-9ade-f0a10ff93d25 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.727850] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f50c8e4-8a66-4cc5-896b-f211669c61c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.795251] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.134s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.804411] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.146s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.804571] env[62383]: INFO nova.compute.claims [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 621.829234] env[62383]: DEBUG nova.compute.manager [req-988bf7a9-70da-4051-a80f-f6b534575662 req-72f28f28-25d5-4686-862e-6115a0d34ae2 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Received event network-changed-b44ff87a-66f9-4720-9a57-b485496554c7 {{(pid=62383) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11648}} [ 621.829451] env[62383]: DEBUG nova.compute.manager [req-988bf7a9-70da-4051-a80f-f6b534575662 req-72f28f28-25d5-4686-862e-6115a0d34ae2 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Refreshing instance network info cache due to event network-changed-b44ff87a-66f9-4720-9a57-b485496554c7. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 621.829783] env[62383]: DEBUG oslo_concurrency.lockutils [req-988bf7a9-70da-4051-a80f-f6b534575662 req-72f28f28-25d5-4686-862e-6115a0d34ae2 service nova] Acquiring lock "refresh_cache-e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.829916] env[62383]: DEBUG oslo_concurrency.lockutils [req-988bf7a9-70da-4051-a80f-f6b534575662 req-72f28f28-25d5-4686-862e-6115a0d34ae2 service nova] Acquired lock "refresh_cache-e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.830887] env[62383]: DEBUG nova.network.neutron [req-988bf7a9-70da-4051-a80f-f6b534575662 req-72f28f28-25d5-4686-862e-6115a0d34ae2 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Refreshing network info cache for port b44ff87a-66f9-4720-9a57-b485496554c7 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 621.833373] env[62383]: INFO nova.scheduler.client.report [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted allocations for instance a170fd95-3f7f-4315-a063-b9d02a7a1af4 [ 621.921076] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquiring lock "refresh_cache-0d992155-24fa-4836-83c9-8f188f7d7efa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.921076] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquired lock "refresh_cache-0d992155-24fa-4836-83c9-8f188f7d7efa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.921076] env[62383]: DEBUG nova.network.neutron [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 621.993655] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451155, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.006803] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c94833-9526-e8cc-2d70-d154ecf0da9d, 'name': SearchDatastore_Task, 'duration_secs': 0.009067} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.007615] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a3e92bd-7d5a-4d03-953b-87396c9ffde7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.015062] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.015548] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for the task: (returnval){ [ 622.015548] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]521322e5-baad-c75f-6237-ad8a7ec7c453" [ 622.015548] env[62383]: _type = "Task" [ 622.015548] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.025871] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521322e5-baad-c75f-6237-ad8a7ec7c453, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.036818] env[62383]: DEBUG oslo_concurrency.lockutils [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "13db2c17-ccba-4336-929a-0d01202c5143" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.037191] env[62383]: DEBUG oslo_concurrency.lockutils [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "13db2c17-ccba-4336-929a-0d01202c5143" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.037560] env[62383]: DEBUG oslo_concurrency.lockutils [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "13db2c17-ccba-4336-929a-0d01202c5143-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.037805] env[62383]: DEBUG oslo_concurrency.lockutils [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "13db2c17-ccba-4336-929a-0d01202c5143-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.038067] env[62383]: DEBUG oslo_concurrency.lockutils [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "13db2c17-ccba-4336-929a-0d01202c5143-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.040844] env[62383]: INFO nova.compute.manager [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Terminating instance [ 622.058538] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451154, 'name': CloneVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.183243] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451158, 'name': CreateVM_Task, 'duration_secs': 0.512462} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.183444] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 622.184176] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.184381] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.184874] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 622.185239] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d71cd02-836f-4dcd-b607-e272f5bed5eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.190830] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for the task: (returnval){ [ 622.190830] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ffbcaa-081d-cbba-5759-0ba0d20db380" [ 622.190830] env[62383]: _type = "Task" [ 622.190830] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.200611] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ffbcaa-081d-cbba-5759-0ba0d20db380, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.342044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63983577-9640-4e1b-958a-37774e357cf8 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a170fd95-3f7f-4315-a063-b9d02a7a1af4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.023s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.423265] env[62383]: DEBUG nova.network.neutron [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Successfully updated port: f2203dee-5264-47c9-93de-7653ae3131a8 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 622.442716] env[62383]: DEBUG nova.network.neutron [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.493285] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451155, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592814} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.493538] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 2337e9a2-736c-4d58-ac2e-04c8ad813be4/2337e9a2-736c-4d58-ac2e-04c8ad813be4.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 622.493749] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 622.494062] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5819f0e7-bb80-44e4-b415-448e38bef5ec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.504141] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 622.504141] env[62383]: value = "task-2451159" [ 622.504141] env[62383]: _type = "Task" [ 622.504141] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.513095] env[62383]: DEBUG nova.network.neutron [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.526312] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451159, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.533349] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521322e5-baad-c75f-6237-ad8a7ec7c453, 'name': SearchDatastore_Task, 'duration_secs': 0.043668} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.533838] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.533838] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 622.534112] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-586e6792-f3e7-4876-aad0-c0eaab08dbed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.540409] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for the task: (returnval){ [ 622.540409] env[62383]: value = "task-2451160" [ 622.540409] env[62383]: _type = "Task" [ 622.540409] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.547102] env[62383]: DEBUG nova.compute.manager [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 622.547102] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.550088] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40661a54-4d3e-4f1a-9670-50939b14c3c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.555945] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451160, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.562916] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 622.566560] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abb66fa2-295e-4cd9-93f0-19de6316683b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.568242] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451154, 'name': CloneVM_Task, 'duration_secs': 1.794346} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.568510] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Created linked-clone VM from snapshot [ 622.569632] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68905109-6a84-457f-876a-7d0bab8857f0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.573756] env[62383]: DEBUG oslo_vmware.api [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 622.573756] env[62383]: value = "task-2451161" [ 622.573756] env[62383]: _type = "Task" [ 622.573756] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.582170] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Uploading image 7f496ddb-c953-495b-950c-a9868bc5e78a {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 622.587182] env[62383]: DEBUG oslo_vmware.api [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.595255] env[62383]: DEBUG nova.network.neutron [req-988bf7a9-70da-4051-a80f-f6b534575662 req-72f28f28-25d5-4686-862e-6115a0d34ae2 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Updated VIF entry in instance network info cache for port b44ff87a-66f9-4720-9a57-b485496554c7. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 622.595661] env[62383]: DEBUG nova.network.neutron [req-988bf7a9-70da-4051-a80f-f6b534575662 req-72f28f28-25d5-4686-862e-6115a0d34ae2 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Updating instance_info_cache with network_info: [{"id": "b44ff87a-66f9-4720-9a57-b485496554c7", "address": "fa:16:3e:ea:36:d2", "network": {"id": "9339d260-1e91-48a5-b2d0-9bbe4d97758e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1896053422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66211918170f4869be521cd36e9248b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb44ff87a-66", "ovs_interfaceid": "b44ff87a-66f9-4720-9a57-b485496554c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.616372] env[62383]: DEBUG oslo_vmware.rw_handles [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 622.616372] env[62383]: value = "vm-496373" [ 622.616372] env[62383]: _type = "VirtualMachine" [ 622.616372] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 622.616899] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a791e2f1-0cb8-4db2-a431-a6b252c5a204 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.624612] env[62383]: DEBUG oslo_vmware.rw_handles [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lease: (returnval){ [ 622.624612] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f6d8c7-ddbd-440d-2be4-05af77682dc5" [ 622.624612] env[62383]: _type = "HttpNfcLease" [ 622.624612] env[62383]: } obtained for exporting VM: (result){ [ 622.624612] env[62383]: value = "vm-496373" [ 622.624612] env[62383]: _type = "VirtualMachine" [ 622.624612] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 622.624999] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the lease: (returnval){ [ 622.624999] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f6d8c7-ddbd-440d-2be4-05af77682dc5" [ 622.624999] env[62383]: _type = "HttpNfcLease" [ 622.624999] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 622.633317] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 622.633317] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f6d8c7-ddbd-440d-2be4-05af77682dc5" [ 622.633317] env[62383]: _type = "HttpNfcLease" [ 622.633317] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 622.702963] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ffbcaa-081d-cbba-5759-0ba0d20db380, 'name': SearchDatastore_Task, 'duration_secs': 0.009785} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.703296] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.703538] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 622.703787] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.703931] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.704143] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 622.704392] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1834d67-7dff-4602-a140-63628897c098 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.713076] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 622.713272] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 622.714079] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a1247bf-e064-479d-bc82-140f880d1f7b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.719571] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for the task: (returnval){ [ 622.719571] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5265d0ea-5834-2cd3-ff9a-d8eb85e57497" [ 622.719571] env[62383]: _type = "Task" [ 622.719571] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.728947] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5265d0ea-5834-2cd3-ff9a-d8eb85e57497, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.926212] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "refresh_cache-60535a30-4602-4063-94a4-30ed01266d5b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.926212] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquired lock "refresh_cache-60535a30-4602-4063-94a4-30ed01266d5b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.926212] env[62383]: DEBUG nova.network.neutron [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 623.015497] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451159, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.157323} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.018744] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 623.019328] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Releasing lock "refresh_cache-0d992155-24fa-4836-83c9-8f188f7d7efa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.019831] env[62383]: DEBUG nova.compute.manager [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 623.020116] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 623.021318] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36ed4dd-333a-4c35-93ea-72f7178a8d22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.024741] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1da0fb-5371-40aa-8bee-3659aff5b32b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.054980] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 2337e9a2-736c-4d58-ac2e-04c8ad813be4/2337e9a2-736c-4d58-ac2e-04c8ad813be4.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 623.055475] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 623.061797] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66558c46-6431-4fa2-af96-77b7dd4574f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.077457] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50e36ecc-3ad7-491e-b51a-381e8f0911a3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
623.088679] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451160, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.090777] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 623.090777] env[62383]: value = "task-2451164" [ 623.090777] env[62383]: _type = "Task" [ 623.090777] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.097110] env[62383]: DEBUG oslo_vmware.api [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451161, 'name': PowerOffVM_Task, 'duration_secs': 0.200873} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.097403] env[62383]: DEBUG oslo_vmware.api [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for the task: (returnval){ [ 623.097403] env[62383]: value = "task-2451163" [ 623.097403] env[62383]: _type = "Task" [ 623.097403] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.100691] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 623.100909] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 623.101481] env[62383]: DEBUG oslo_concurrency.lockutils [req-988bf7a9-70da-4051-a80f-f6b534575662 req-72f28f28-25d5-4686-862e-6115a0d34ae2 service nova] Releasing lock "refresh_cache-e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.101831] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e5a7a0e-99a6-4bcf-8568-afc4a80bca5a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.111145] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451164, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.116443] env[62383]: DEBUG oslo_vmware.api [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451163, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.134385] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 623.134385] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f6d8c7-ddbd-440d-2be4-05af77682dc5" [ 623.134385] env[62383]: _type = "HttpNfcLease" [ 623.134385] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 623.134676] env[62383]: DEBUG oslo_vmware.rw_handles [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 623.134676] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f6d8c7-ddbd-440d-2be4-05af77682dc5" [ 623.134676] env[62383]: _type = "HttpNfcLease" [ 623.134676] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 623.135503] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec235bd-f784-433c-8cbd-2228dcde51cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.146662] env[62383]: DEBUG oslo_vmware.rw_handles [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0d1e-9584-1a00-e8fd-a95ea92fb263/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 623.147032] env[62383]: DEBUG oslo_vmware.rw_handles [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0d1e-9584-1a00-e8fd-a95ea92fb263/disk-0.vmdk for reading. 
{{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 623.209575] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 623.209790] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 623.209964] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleting the datastore file [datastore2] 13db2c17-ccba-4336-929a-0d01202c5143 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.211943] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd2bab63-5de9-4e06-b950-471f44be24f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.218506] env[62383]: DEBUG oslo_vmware.api [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 623.218506] env[62383]: value = "task-2451166" [ 623.218506] env[62383]: _type = "Task" [ 623.218506] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.229514] env[62383]: DEBUG oslo_vmware.api [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451166, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.235633] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5265d0ea-5834-2cd3-ff9a-d8eb85e57497, 'name': SearchDatastore_Task, 'duration_secs': 0.010016} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.236628] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-478375c3-dc4e-4a6d-a32f-a08e1496c9f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.246744] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for the task: (returnval){ [ 623.246744] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5221898e-da0b-05bf-7e63-fbf68589cbd5" [ 623.246744] env[62383]: _type = "Task" [ 623.246744] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.251040] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9a72368f-5575-465c-ae9f-49bdfeef0571 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.263811] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5221898e-da0b-05bf-7e63-fbf68589cbd5, 'name': SearchDatastore_Task, 'duration_secs': 0.009812} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.266117] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.266378] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] e51a0dd7-b5da-44cb-9cd8-62932aec3ad5/e51a0dd7-b5da-44cb-9cd8-62932aec3ad5.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 623.268248] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1f795ee-0b2d-45e4-b353-dbf88ebf303e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.275237] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for the task: (returnval){ [ 623.275237] env[62383]: value = "task-2451167" [ 623.275237] env[62383]: _type = "Task" [ 623.275237] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.288102] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451167, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.490038] env[62383]: DEBUG nova.network.neutron [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.494470] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02594c6a-c534-443d-a67c-17ea191d113f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.512678] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958bef90-4881-482d-820a-a1f60d850357 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.559404] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b50caf8-ab2f-419f-b60e-5ebe35b8b894 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.570350] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451160, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.566824} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.573103] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 623.573382] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 623.573761] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a5b06a6e-6230-45c7-96f0-174119d74942 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.577121] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0da107-b095-4f1f-8a7b-4b453b1057cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.596129] env[62383]: DEBUG nova.compute.provider_tree [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.599580] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for the task: (returnval){ [ 623.599580] env[62383]: value = "task-2451168" [ 623.599580] env[62383]: _type = "Task" [ 623.599580] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.623829] env[62383]: DEBUG oslo_vmware.api [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451163, 'name': PowerOffVM_Task, 'duration_secs': 0.130852} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.630493] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 623.630715] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 623.631110] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451168, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.631746] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451164, 'name': ReconfigVM_Task, 'duration_secs': 0.33386} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.631969] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7eed69e-b32d-44b3-ba6c-1b589ec8499e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.634047] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 2337e9a2-736c-4d58-ac2e-04c8ad813be4/2337e9a2-736c-4d58-ac2e-04c8ad813be4.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 623.635109] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36f65eb9-1860-4645-8eed-976abb923f6d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.643739] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 623.643739] env[62383]: value = "task-2451169" [ 623.643739] env[62383]: _type = "Task" [ 623.643739] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.654349] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451169, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.666338] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 623.666849] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 623.667081] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Deleting the datastore file [datastore2] 0d992155-24fa-4836-83c9-8f188f7d7efa {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.668174] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-befe73fb-37d0-494e-8e6a-4200b2db6bf7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.676021] env[62383]: DEBUG oslo_vmware.api [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for the task: (returnval){ [ 623.676021] env[62383]: value = "task-2451171" [ 623.676021] env[62383]: _type = "Task" [ 623.676021] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.687155] env[62383]: DEBUG oslo_vmware.api [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451171, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.710243] env[62383]: DEBUG nova.network.neutron [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Updating instance_info_cache with network_info: [{"id": "f2203dee-5264-47c9-93de-7653ae3131a8", "address": "fa:16:3e:67:cd:0a", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2203dee-52", "ovs_interfaceid": "f2203dee-5264-47c9-93de-7653ae3131a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.732695] env[62383]: DEBUG oslo_vmware.api [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169036} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.733138] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 623.733350] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 623.733614] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 623.733837] env[62383]: INFO nova.compute.manager [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Took 1.19 seconds to destroy the instance on the hypervisor. 
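Annotation (not part of the captured log): the recurring "Waiting for the task", "progress is N%" and "completed successfully" entries above and below are emitted while oslo.vmware polls a vCenter task object until it reaches a terminal state. The following is a minimal, hypothetical sketch of that poll-until-done loop; the get_task_info callable, the state strings, and the poll interval are placeholders for illustration and are not the oslo.vmware API.

import time

# Hypothetical task states mirroring vSphere's TaskInfo.state values.
RUNNING, QUEUED, SUCCESS, ERROR = "running", "queued", "success", "error"

def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll a task until it finishes, as the DEBUG entries in this log reflect.

    get_task_info is a placeholder callable returning a dict with
    'state', 'progress' and optional 'error' keys; it stands in for the
    PropertyCollector round-trips performed on each poll.
    """
    while True:
        info = get_task_info(task_id)
        if info["state"] in (QUEUED, RUNNING):
            # Corresponds to the "... progress is N%" lines emitted while polling.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)
            continue
        if info["state"] == SUCCESS:
            # Corresponds to "... completed successfully."
            return info
        raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")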
[ 623.734167] env[62383]: DEBUG oslo.service.loopingcall [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 623.734431] env[62383]: DEBUG nova.compute.manager [-] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 623.734608] env[62383]: DEBUG nova.network.neutron [-] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 623.785227] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451167, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.4816} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.785808] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] e51a0dd7-b5da-44cb-9cd8-62932aec3ad5/e51a0dd7-b5da-44cb-9cd8-62932aec3ad5.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 623.786327] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 623.786327] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13466714-b2a5-4377-8cfa-2c6916814f30 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.793120] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for the task: (returnval){ [ 623.793120] env[62383]: value = "task-2451172" [ 623.793120] env[62383]: _type = "Task" [ 623.793120] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.806065] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451172, 'name': ExtendVirtualDisk_Task} progress is 0%.
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.929494] env[62383]: DEBUG nova.compute.manager [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Received event network-vif-plugged-f2203dee-5264-47c9-93de-7653ae3131a8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 623.930330] env[62383]: DEBUG oslo_concurrency.lockutils [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] Acquiring lock "60535a30-4602-4063-94a4-30ed01266d5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.930611] env[62383]: DEBUG oslo_concurrency.lockutils [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] Lock "60535a30-4602-4063-94a4-30ed01266d5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.931080] env[62383]: DEBUG oslo_concurrency.lockutils [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] Lock "60535a30-4602-4063-94a4-30ed01266d5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.931379] env[62383]: DEBUG nova.compute.manager [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] No waiting events found dispatching network-vif-plugged-f2203dee-5264-47c9-93de-7653ae3131a8 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 623.931653] env[62383]: WARNING nova.compute.manager [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Received unexpected event network-vif-plugged-f2203dee-5264-47c9-93de-7653ae3131a8 for instance with vm_state building and task_state spawning. [ 623.931956] env[62383]: DEBUG nova.compute.manager [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Received event network-changed-f2203dee-5264-47c9-93de-7653ae3131a8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 623.932254] env[62383]: DEBUG nova.compute.manager [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Refreshing instance network info cache due to event network-changed-f2203dee-5264-47c9-93de-7653ae3131a8.
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 623.932440] env[62383]: DEBUG oslo_concurrency.lockutils [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] Acquiring lock "refresh_cache-60535a30-4602-4063-94a4-30ed01266d5b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.098308] env[62383]: DEBUG nova.compute.manager [req-07f28db3-679b-48a4-a57e-d7bb66c011a1 req-cf52eafa-7d1f-4012-aa4f-9eb1a05709e6 service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Received event network-vif-deleted-f04b9ac5-a0ac-4535-944c-01e2d59d6db4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 624.098815] env[62383]: INFO nova.compute.manager [req-07f28db3-679b-48a4-a57e-d7bb66c011a1 req-cf52eafa-7d1f-4012-aa4f-9eb1a05709e6 service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Neutron deleted interface f04b9ac5-a0ac-4535-944c-01e2d59d6db4; detaching it from the instance and deleting it from the info cache [ 624.099120] env[62383]: DEBUG nova.network.neutron [req-07f28db3-679b-48a4-a57e-d7bb66c011a1 req-cf52eafa-7d1f-4012-aa4f-9eb1a05709e6 service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.101471] env[62383]: DEBUG nova.scheduler.client.report [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 624.119653] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.198729} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.119979] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 624.121052] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef90157-36b7-40b9-b62d-cf7bcdfb51f0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.142824] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 624.143669] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95912416-fb73-480f-90f0-d6a0fb650265 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.171761] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451169, 'name': Rename_Task, 'duration_secs': 0.219272} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.174057] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 624.174057] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for the task: (returnval){ [ 624.174057] env[62383]: value = "task-2451173" [ 624.174057] env[62383]: _type = "Task" [ 624.174057] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.174057] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd29678a-e97e-45ab-9c0a-2d7f8ff468aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.183584] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 624.183584] env[62383]: value = "task-2451174" [ 624.183584] env[62383]: _type = "Task" [ 624.183584] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.187601] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451173, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.193849] env[62383]: DEBUG oslo_vmware.api [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Task: {'id': task-2451171, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15931} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.194589] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 624.194777] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 624.194944] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 624.195162] env[62383]: INFO nova.compute.manager [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Took 1.18 seconds to destroy the instance on the hypervisor. [ 624.195416] env[62383]: DEBUG oslo.service.loopingcall [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.195704] env[62383]: DEBUG nova.compute.manager [-] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 624.195780] env[62383]: DEBUG nova.network.neutron [-] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 624.201756] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451174, 'name': PowerOnVM_Task} progress is 33%.
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.213439] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Releasing lock "refresh_cache-60535a30-4602-4063-94a4-30ed01266d5b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.213792] env[62383]: DEBUG nova.compute.manager [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Instance network_info: |[{"id": "f2203dee-5264-47c9-93de-7653ae3131a8", "address": "fa:16:3e:67:cd:0a", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2203dee-52", "ovs_interfaceid": "f2203dee-5264-47c9-93de-7653ae3131a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 624.214188] env[62383]: DEBUG oslo_concurrency.lockutils [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] Acquired lock "refresh_cache-60535a30-4602-4063-94a4-30ed01266d5b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.214512] env[62383]: DEBUG nova.network.neutron [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Refreshing network info cache for port f2203dee-5264-47c9-93de-7653ae3131a8 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 624.216024] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:cd:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2203dee-5264-47c9-93de-7653ae3131a8', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 624.225029] env[62383]: DEBUG oslo.service.loopingcall [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 
tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.226322] env[62383]: DEBUG nova.network.neutron [-] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.228090] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 624.228518] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91785fcb-93fd-4bc9-b577-cbb7e40fe9ea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.249393] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 624.249393] env[62383]: value = "task-2451175" [ 624.249393] env[62383]: _type = "Task" [ 624.249393] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.259235] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451175, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.305611] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451172, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136051} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.305887] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 624.306696] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4116cc8-1c4e-491d-b8e3-423d0f79e93f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.330281] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] e51a0dd7-b5da-44cb-9cd8-62932aec3ad5/e51a0dd7-b5da-44cb-9cd8-62932aec3ad5.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 624.330605] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-791bfaee-4165-4dd5-a069-b7dde69f4830 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.353399] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for the task: (returnval){ [ 624.353399] env[62383]: value = "task-2451176" [ 624.353399] env[62383]: _type = "Task" [ 624.353399] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.361281] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451176, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.522386] env[62383]: DEBUG nova.network.neutron [-] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.606886] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eff298d5-b102-487a-a2bd-85113d2f3ee4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.610126] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.808s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.610641] env[62383]: DEBUG nova.compute.manager [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 624.614383] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.711s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.614734] env[62383]: DEBUG nova.objects.instance [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Lazy-loading 'resources' on Instance uuid eedadcc7-d02e-4a21-a43a-1dccde81b3b4 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 624.625246] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb7b99e-2867-4c85-8463-93c4bd66a8ac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.664457] env[62383]: DEBUG nova.compute.manager [req-07f28db3-679b-48a4-a57e-d7bb66c011a1 req-cf52eafa-7d1f-4012-aa4f-9eb1a05709e6 service nova] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Detach interface failed, port_id=f04b9ac5-a0ac-4535-944c-01e2d59d6db4, reason: Instance 13db2c17-ccba-4336-929a-0d01202c5143 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 624.686217] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451173, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.696700] env[62383]: DEBUG oslo_vmware.api [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451174, 'name': PowerOnVM_Task, 'duration_secs': 0.473293} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.697820] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 624.697820] env[62383]: INFO nova.compute.manager [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Took 9.61 seconds to spawn the instance on the hypervisor. [ 624.697820] env[62383]: DEBUG nova.compute.manager [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 624.699030] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75f63c0-69e5-4ed6-b246-c781f5ef0606 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.729135] env[62383]: DEBUG nova.network.neutron [-] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.759166] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451175, 'name': CreateVM_Task, 'duration_secs': 0.366275} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.759314] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 624.759961] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.760150] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.760525] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 624.763086] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d29291f-f269-42dd-a773-2f9ee7a44ce8 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.767889] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 624.767889] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52536d77-317a-bc92-0d56-c27b4cddbff8" [ 624.767889] env[62383]: _type = "Task" [ 624.767889] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.776462] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52536d77-317a-bc92-0d56-c27b4cddbff8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.864993] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451176, 'name': ReconfigVM_Task, 'duration_secs': 0.363846} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.865871] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Reconfigured VM instance instance-0000001a to attach disk [datastore1] e51a0dd7-b5da-44cb-9cd8-62932aec3ad5/e51a0dd7-b5da-44cb-9cd8-62932aec3ad5.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 624.866031] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f8121d39-31c1-44db-986d-e13d00352614 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.873571] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for the task: (returnval){ [ 624.873571] env[62383]: value = "task-2451177" [ 624.873571] env[62383]: _type = "Task" [ 624.873571] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.880521] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451177, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.960980] env[62383]: DEBUG nova.network.neutron [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Updated VIF entry in instance network info cache for port f2203dee-5264-47c9-93de-7653ae3131a8. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 624.961387] env[62383]: DEBUG nova.network.neutron [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Updating instance_info_cache with network_info: [{"id": "f2203dee-5264-47c9-93de-7653ae3131a8", "address": "fa:16:3e:67:cd:0a", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.166", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2203dee-52", "ovs_interfaceid": "f2203dee-5264-47c9-93de-7653ae3131a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.024985] env[62383]: INFO nova.compute.manager [-] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Took 1.29 seconds to deallocate network for instance. [ 625.122102] env[62383]: DEBUG nova.compute.utils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 625.124305] env[62383]: DEBUG nova.compute.manager [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Not allocating networking since 'none' was specified. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 625.194289] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451173, 'name': ReconfigVM_Task, 'duration_secs': 0.634118} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.194289] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 69569fa0-5175-453e-9875-9ef46c723da8/69569fa0-5175-453e-9875-9ef46c723da8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 625.194743] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8bee5922-351d-4c03-974f-cf57a4fe6d00 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.203949] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for the task: (returnval){ [ 625.203949] env[62383]: value = "task-2451178" [ 625.203949] env[62383]: _type = "Task" [ 625.203949] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.218329] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451178, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.224545] env[62383]: INFO nova.compute.manager [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Took 40.12 seconds to build instance. [ 625.232764] env[62383]: INFO nova.compute.manager [-] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Took 1.04 seconds to deallocate network for instance. [ 625.278382] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52536d77-317a-bc92-0d56-c27b4cddbff8, 'name': SearchDatastore_Task, 'duration_secs': 0.013306} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.282132] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.282132] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 625.282298] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.282441] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.285161] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 625.285161] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-680f22a9-0be4-4285-b536-6f54d3ee430b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.293475] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 625.293536] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 625.297170] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff9a1fb3-f7bf-446d-a682-69cec325104d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.302856] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 625.302856] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52fb4337-c658-db1c-baa7-0b5d63b6f0fd" [ 625.302856] env[62383]: _type = "Task" [ 625.302856] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.310642] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fb4337-c658-db1c-baa7-0b5d63b6f0fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.383319] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451177, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.464780] env[62383]: DEBUG oslo_concurrency.lockutils [req-0fd90614-8ae1-432d-8adf-ba02639ad855 req-d64e439f-1658-4ed2-b456-fd28b9b93d9a service nova] Releasing lock "refresh_cache-60535a30-4602-4063-94a4-30ed01266d5b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.531560] env[62383]: DEBUG oslo_concurrency.lockutils [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.625599] env[62383]: DEBUG nova.compute.manager [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 625.697247] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee78c6a3-0600-455d-807c-0da5a11b7386 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.705372] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994d2d41-932b-4905-bea3-0815abf9e21d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.717364] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451178, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.742626] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8feef644-64be-43e9-b17e-6e0696cfe6bf tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 51.580s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.743895] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.745862] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50c7ef9-eb46-4078-806a-ea2237c9ee19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.753791] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa91051-9016-4b83-a718-5dac563861b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.776158] env[62383]: DEBUG nova.compute.provider_tree [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.816190] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fb4337-c658-db1c-baa7-0b5d63b6f0fd, 'name': SearchDatastore_Task, 'duration_secs': 0.01663} completed successfully.
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.817180] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd95b4fa-e264-488c-86db-1bf2984b6470 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.824097] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 625.824097] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5293c4ed-b3f9-d58f-331f-c342db1d7c0b" [ 625.824097] env[62383]: _type = "Task" [ 625.824097] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.832435] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5293c4ed-b3f9-d58f-331f-c342db1d7c0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.888800] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451177, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.218878] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451178, 'name': Rename_Task, 'duration_secs': 0.694733} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.219217] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 626.219514] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13d94fcd-fc7f-49a9-875f-3428006dd1b3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.226086] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Waiting for the task: (returnval){ [ 626.226086] env[62383]: value = "task-2451179" [ 626.226086] env[62383]: _type = "Task" [ 626.226086] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.234497] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451179, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.249326] env[62383]: DEBUG nova.compute.manager [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 626.283567] env[62383]: DEBUG nova.scheduler.client.report [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 626.336058] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5293c4ed-b3f9-d58f-331f-c342db1d7c0b, 'name': SearchDatastore_Task, 'duration_secs': 0.013397} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.336353] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.336629] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 60535a30-4602-4063-94a4-30ed01266d5b/60535a30-4602-4063-94a4-30ed01266d5b.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 626.336891] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1393851-d36f-40a7-8b3d-0c87c66711dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.343378] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 626.343378] env[62383]: value = "task-2451180" [ 626.343378] env[62383]: _type = "Task" [ 626.343378] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.351953] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451180, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.367180] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "a10f5b03-c45b-4cc2-923f-3227665d236c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.367498] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "a10f5b03-c45b-4cc2-923f-3227665d236c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.385371] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451177, 'name': Rename_Task, 'duration_secs': 1.143495} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.385664] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 626.385921] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edcc5166-440c-4f99-8593-69f1ae962c98 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.392738] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for the task: (returnval){ [ 626.392738] env[62383]: value = "task-2451181" [ 626.392738] env[62383]: _type = "Task" [ 626.392738] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.400972] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451181, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.635093] env[62383]: DEBUG nova.compute.manager [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 626.661796] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 626.661796] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.662053] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 626.662250] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.662471] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 626.662762] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 626.663055] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 626.663261] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 626.663439] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 
tempest-ServerShowV257Test-561862572-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 626.663646] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 626.663941] env[62383]: DEBUG nova.virt.hardware [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 626.664766] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f23889f-6b65-4a2c-8c03-e589936c8b89 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.673648] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e915385-4512-4e83-a048-6f64d464e988 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.692043] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 626.698205] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Creating folder: Project (059b4194747541da87c07f464546121d). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.698682] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1a7b2c1-5df5-418c-8f43-c0b3a7350793 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.710088] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Created folder: Project (059b4194747541da87c07f464546121d) in parent group-v496304. [ 626.710684] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Creating folder: Instances. Parent ref: group-v496378. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.710684] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-efdf4a9a-a376-4ff4-9750-f89b2c029305 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.720472] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Created folder: Instances in parent group-v496378. 
[ 626.720694] env[62383]: DEBUG oslo.service.loopingcall [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 626.720950] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 626.721198] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70e8a320-060e-451f-b599-9481bee2e009 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.742657] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451179, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.744286] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 626.744286] env[62383]: value = "task-2451184" [ 626.744286] env[62383]: _type = "Task" [ 626.744286] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.755645] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451184, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.779183] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.792290] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.178s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.795607] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.048s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.796779] env[62383]: INFO nova.compute.claims [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 626.817218] env[62383]: INFO nova.scheduler.client.report [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] 
Deleted allocations for instance eedadcc7-d02e-4a21-a43a-1dccde81b3b4 [ 626.855910] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451180, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.907043] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451181, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.246515] env[62383]: DEBUG oslo_vmware.api [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Task: {'id': task-2451179, 'name': PowerOnVM_Task, 'duration_secs': 0.52326} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.251757] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 627.251860] env[62383]: DEBUG nova.compute.manager [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 627.252666] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145d8b87-34ba-4757-a1d0-91e6d8db2000 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.260822] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451184, 'name': CreateVM_Task, 'duration_secs': 0.411066} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.262124] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 627.265046] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.265193] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.265519] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 627.265987] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eaab4de-a269-407d-911c-e33544e790c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.270571] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 627.270571] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52773ef2-6a89-bf90-3bb4-22c8667de209" [ 627.270571] env[62383]: _type = "Task" [ 627.270571] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.278772] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52773ef2-6a89-bf90-3bb4-22c8667de209, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.326373] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bdacadcc-83d9-4b8b-b7fc-cb7861f2fb7a tempest-ServerPasswordTestJSON-1517103748 tempest-ServerPasswordTestJSON-1517103748-project-member] Lock "eedadcc7-d02e-4a21-a43a-1dccde81b3b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.782s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.354812] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451180, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.404855] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451181, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.776222] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.782314] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52773ef2-6a89-bf90-3bb4-22c8667de209, 'name': SearchDatastore_Task, 'duration_secs': 0.010559} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.782641] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.782898] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 627.783154] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.783298] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.783474] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 627.783724] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed48682c-5850-4938-9c2f-85e7b7bb24d0 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.795403] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 627.795536] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 627.796334] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbf6fdf1-28d0-4006-8565-7a489254e876 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.801952] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 627.801952] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52cd77d7-4ad1-54c8-ecf3-86e1a61dd4cb" [ 627.801952] env[62383]: _type = "Task" [ 627.801952] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.816205] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52cd77d7-4ad1-54c8-ecf3-86e1a61dd4cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.858471] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451180, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.908042] env[62383]: DEBUG oslo_vmware.api [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451181, 'name': PowerOnVM_Task, 'duration_secs': 1.036845} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.910539] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 627.911029] env[62383]: INFO nova.compute.manager [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Took 9.02 seconds to spawn the instance on the hypervisor. 
[ 627.911406] env[62383]: DEBUG nova.compute.manager [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 627.913194] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7456a82-67de-421d-b8c2-3bb0e4cf653b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.319046] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52cd77d7-4ad1-54c8-ecf3-86e1a61dd4cb, 'name': SearchDatastore_Task, 'duration_secs': 0.031739} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.320199] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e44b29ea-67b8-475f-b185-15b783c6a89d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.326495] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 628.326495] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52271347-e2c2-2a66-bcb8-9359da6ed6a1" [ 628.326495] env[62383]: _type = "Task" [ 628.326495] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.338522] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52271347-e2c2-2a66-bcb8-9359da6ed6a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.355781] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451180, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.595175} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.356916] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 60535a30-4602-4063-94a4-30ed01266d5b/60535a30-4602-4063-94a4-30ed01266d5b.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 628.356916] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 628.356916] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97e32c49-ea4a-469e-90f8-8c000fa2bbf2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.364832] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 628.364832] env[62383]: value = "task-2451185" [ 628.364832] env[62383]: _type = "Task" [ 628.364832] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.375657] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451185, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.439016] env[62383]: INFO nova.compute.manager [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Took 36.82 seconds to build instance. 
[ 628.447107] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c764feb0-6fea-4329-9954-d20631a3d59f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.456109] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0844c428-4240-4f39-a1c5-90e951dc9442 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.491073] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae30f2f-9830-4e84-9720-37787a264045 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.499757] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c273ba-3590-455f-9f9c-d4c6e898e872 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.514933] env[62383]: DEBUG nova.compute.provider_tree [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.739967] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "69569fa0-5175-453e-9875-9ef46c723da8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.740384] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "69569fa0-5175-453e-9875-9ef46c723da8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.740679] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "69569fa0-5175-453e-9875-9ef46c723da8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.740970] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "69569fa0-5175-453e-9875-9ef46c723da8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.741258] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "69569fa0-5175-453e-9875-9ef46c723da8-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.743399] env[62383]: INFO nova.compute.manager [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Terminating instance [ 628.837931] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52271347-e2c2-2a66-bcb8-9359da6ed6a1, 'name': SearchDatastore_Task, 'duration_secs': 0.01697} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.838252] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.838601] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 045e5f8f-edd5-425d-bccb-054d90db27d9/045e5f8f-edd5-425d-bccb-054d90db27d9.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 628.838931] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36608c4f-76aa-4b29-a6c7-7c99d65e9b94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.847029] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 628.847029] env[62383]: value = "task-2451186" [ 628.847029] env[62383]: _type = "Task" [ 628.847029] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.856606] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451186, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.874412] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451185, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122375} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.874705] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.875524] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c28d9d4-d758-4d1e-b86e-2119ee96ad62 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.898033] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 60535a30-4602-4063-94a4-30ed01266d5b/60535a30-4602-4063-94a4-30ed01266d5b.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.898488] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d40a4b2-e491-4988-a494-aa8801f57dfe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.918623] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 628.918623] env[62383]: value = "task-2451187" [ 628.918623] env[62383]: _type = "Task" [ 628.918623] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.926955] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451187, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.943725] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7228b2e-34a8-4da5-8a06-add27b2cc07e tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.960s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.018175] env[62383]: DEBUG nova.scheduler.client.report [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 629.250252] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "refresh_cache-69569fa0-5175-453e-9875-9ef46c723da8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.250252] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquired lock "refresh_cache-69569fa0-5175-453e-9875-9ef46c723da8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.250252] env[62383]: DEBUG nova.network.neutron [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 629.368033] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451186, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.429671] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451187, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.447462] env[62383]: DEBUG nova.compute.manager [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 629.524480] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.729s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.525069] env[62383]: DEBUG nova.compute.manager [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 629.529148] env[62383]: DEBUG oslo_concurrency.lockutils [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.234s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.529400] env[62383]: DEBUG nova.objects.instance [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lazy-loading 'resources' on Instance uuid dd0ad4e3-a6e6-4258-b960-544984e24ebc {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 629.686056] env[62383]: DEBUG nova.compute.manager [req-d51b6ecf-a2cb-4590-bba1-1ae39f924d70 req-bf9ac2f0-cc65-46d5-a1c7-f068eedea2d7 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Received event network-changed-b44ff87a-66f9-4720-9a57-b485496554c7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 629.686260] env[62383]: DEBUG nova.compute.manager [req-d51b6ecf-a2cb-4590-bba1-1ae39f924d70 req-bf9ac2f0-cc65-46d5-a1c7-f068eedea2d7 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Refreshing instance network info cache due to event network-changed-b44ff87a-66f9-4720-9a57-b485496554c7. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 629.686591] env[62383]: DEBUG oslo_concurrency.lockutils [req-d51b6ecf-a2cb-4590-bba1-1ae39f924d70 req-bf9ac2f0-cc65-46d5-a1c7-f068eedea2d7 service nova] Acquiring lock "refresh_cache-e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.686734] env[62383]: DEBUG oslo_concurrency.lockutils [req-d51b6ecf-a2cb-4590-bba1-1ae39f924d70 req-bf9ac2f0-cc65-46d5-a1c7-f068eedea2d7 service nova] Acquired lock "refresh_cache-e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.686984] env[62383]: DEBUG nova.network.neutron [req-d51b6ecf-a2cb-4590-bba1-1ae39f924d70 req-bf9ac2f0-cc65-46d5-a1c7-f068eedea2d7 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Refreshing network info cache for port b44ff87a-66f9-4720-9a57-b485496554c7 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 629.782271] env[62383]: DEBUG nova.network.neutron [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.868333] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451186, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617862} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.869131] env[62383]: DEBUG nova.network.neutron [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.870402] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 045e5f8f-edd5-425d-bccb-054d90db27d9/045e5f8f-edd5-425d-bccb-054d90db27d9.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 629.870623] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 629.871184] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fdc23a7b-d099-425e-89ac-e647021ebc80 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.878690] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 
tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 629.878690] env[62383]: value = "task-2451188" [ 629.878690] env[62383]: _type = "Task" [ 629.878690] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.889303] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451188, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.931879] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451187, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.982558] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.031450] env[62383]: DEBUG nova.compute.utils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 630.035938] env[62383]: DEBUG nova.compute.manager [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 630.035938] env[62383]: DEBUG nova.network.neutron [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 630.086606] env[62383]: DEBUG nova.policy [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '671a6c1983c64c26b3ea501f171045d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5bed29fa2bc64a31b3324d7d0d01c61d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 630.379348] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Releasing lock "refresh_cache-69569fa0-5175-453e-9875-9ef46c723da8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.379852] env[62383]: DEBUG nova.compute.manager [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 630.380103] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 630.381374] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b374c29-2e89-45c5-9604-5ac6a937ab74 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.394095] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451188, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114939} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.399780] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 630.399780] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 630.401367] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf96f95a-b004-470e-a7e2-511b6764addb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.405976] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba1d18c6-a921-4b35-92ae-5f430ce7cbcd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.429534] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] 045e5f8f-edd5-425d-bccb-054d90db27d9/045e5f8f-edd5-425d-bccb-054d90db27d9.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.439721] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-404bda1e-957a-46f4-8701-44a05fd13034 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.454861] env[62383]: DEBUG oslo_vmware.api [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 630.454861] env[62383]: value = "task-2451189" [ 630.454861] env[62383]: _type = "Task" [ 630.454861] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.456070] env[62383]: DEBUG nova.network.neutron [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Successfully created port: 3c2cbd45-1a44-495a-bfe1-6e6f90985ded {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 630.467657] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 630.467657] env[62383]: value = "task-2451190" [ 630.467657] env[62383]: _type = "Task" [ 630.467657] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.467971] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451187, 'name': ReconfigVM_Task, 'duration_secs': 1.395574} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.469144] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 60535a30-4602-4063-94a4-30ed01266d5b/60535a30-4602-4063-94a4-30ed01266d5b.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 630.472715] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd09d7c1-9505-4d37-8486-d6038476cda4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.477919] env[62383]: DEBUG oslo_vmware.api [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451189, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.487381] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451190, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.489600] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 630.489600] env[62383]: value = "task-2451191" [ 630.489600] env[62383]: _type = "Task" [ 630.489600] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.505495] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451191, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.539627] env[62383]: DEBUG nova.compute.manager [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 630.632743] env[62383]: DEBUG nova.network.neutron [req-d51b6ecf-a2cb-4590-bba1-1ae39f924d70 req-bf9ac2f0-cc65-46d5-a1c7-f068eedea2d7 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Updated VIF entry in instance network info cache for port b44ff87a-66f9-4720-9a57-b485496554c7. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 630.633141] env[62383]: DEBUG nova.network.neutron [req-d51b6ecf-a2cb-4590-bba1-1ae39f924d70 req-bf9ac2f0-cc65-46d5-a1c7-f068eedea2d7 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Updating instance_info_cache with network_info: [{"id": "b44ff87a-66f9-4720-9a57-b485496554c7", "address": "fa:16:3e:ea:36:d2", "network": {"id": "9339d260-1e91-48a5-b2d0-9bbe4d97758e", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1896053422-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66211918170f4869be521cd36e9248b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb44ff87a-66", "ovs_interfaceid": "b44ff87a-66f9-4720-9a57-b485496554c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.757816] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9fff83-6438-48b4-bd86-789023e0b97c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.768771] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0594eaa-fbbb-4824-a71b-926d0758a4c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.807180] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a9c574-f094-4098-b860-64a83656fc75 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.816059] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37efe62f-d6d7-4bd5-9ca5-82c73044b70b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.831322] env[62383]: DEBUG nova.compute.provider_tree [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.971032] env[62383]: DEBUG oslo_vmware.api [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451189, 'name': PowerOffVM_Task, 'duration_secs': 0.208423} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.973891] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 630.973891] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 630.974504] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4090d31e-1b93-4da2-9083-b6d62a79f382 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.980971] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451190, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.000951] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451191, 'name': Rename_Task, 'duration_secs': 0.202212} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.002663] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 631.003021] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 631.003270] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 631.003497] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Deleting the datastore file [datastore1] 69569fa0-5175-453e-9875-9ef46c723da8 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 631.003771] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-389480b6-39d8-40c3-b7de-f81c9ab28ac9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
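The wait_for_task and _poll_task entries surrounding this point all follow the same poll-until-terminal pattern: an asynchronous vSphere task (PowerOnVM_Task, DeleteDatastoreFile_Task, ReconfigVM_Task, ...) is kicked off, and the caller then repeatedly reads the task's state and progress until it reports success or an error. The sketch below is a minimal, self-contained illustration of that pattern only; it is not the oslo.vmware implementation, and TaskInfo, fetch_task_info and the state strings are illustrative stand-ins.

```python
# Minimal sketch of the poll-until-terminal pattern behind the
# "Waiting for the task ... / progress is N% / completed successfully"
# log entries. TaskInfo, fetch_task_info and the state strings are
# assumptions for illustration, not the real oslo.vmware API.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str          # assumed states: 'running', 'success', 'error'
    progress: int = 0   # percentage, as printed in the DEBUG lines
    error: str = ''


def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=120.0):
    """Poll a vSphere-style task until it succeeds, fails or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError('task failed: %s' % info.error)
        # Corresponds to the periodic "progress is N%" DEBUG lines above.
        print('progress is %d%%' % info.progress)
        time.sleep(poll_interval)
    raise TimeoutError('task did not reach a terminal state in time')


if __name__ == '__main__':
    # Fake task that finishes on the third poll.
    states = iter([TaskInfo('running', 0), TaskInfo('running', 88),
                   TaskInfo('success', 100)])
    wait_for_task(lambda: next(states), poll_interval=0.01)
```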
[ 631.005891] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-833e28df-e50c-4c80-8901-58a8e1b415f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.013172] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 631.013172] env[62383]: value = "task-2451193" [ 631.013172] env[62383]: _type = "Task" [ 631.013172] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.018644] env[62383]: DEBUG oslo_vmware.api [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for the task: (returnval){ [ 631.018644] env[62383]: value = "task-2451194" [ 631.018644] env[62383]: _type = "Task" [ 631.018644] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.027418] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451193, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.033832] env[62383]: DEBUG oslo_vmware.api [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451194, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.139147] env[62383]: DEBUG oslo_concurrency.lockutils [req-d51b6ecf-a2cb-4590-bba1-1ae39f924d70 req-bf9ac2f0-cc65-46d5-a1c7-f068eedea2d7 service nova] Releasing lock "refresh_cache-e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.337027] env[62383]: DEBUG nova.scheduler.client.report [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 631.481151] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451190, 'name': ReconfigVM_Task, 'duration_secs': 0.574601} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.481449] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Reconfigured VM instance instance-0000001c to attach disk [datastore2] 045e5f8f-edd5-425d-bccb-054d90db27d9/045e5f8f-edd5-425d-bccb-054d90db27d9.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 631.482072] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa84e2bb-bde7-427b-9fd5-30a6fdb847a0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.489260] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 631.489260] env[62383]: value = "task-2451195" [ 631.489260] env[62383]: _type = "Task" [ 631.489260] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.497793] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451195, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.523570] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451193, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.532543] env[62383]: DEBUG oslo_vmware.api [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Task: {'id': task-2451194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.237479} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.532683] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 631.532875] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 631.533065] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 631.533244] env[62383]: INFO nova.compute.manager [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Took 1.15 seconds to destroy the instance on the hypervisor. [ 631.533544] env[62383]: DEBUG oslo.service.loopingcall [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 631.533764] env[62383]: DEBUG nova.compute.manager [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 631.533864] env[62383]: DEBUG nova.network.neutron [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 631.551127] env[62383]: DEBUG nova.compute.manager [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 631.554207] env[62383]: DEBUG nova.network.neutron [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 631.573201] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 631.573526] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 631.573723] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 631.573960] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 631.574153] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 631.574321] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 631.574556] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 631.574722] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 631.574909] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 631.575097] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 631.575965] env[62383]: DEBUG nova.virt.hardware [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 631.576460] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82d2109-36ea-4588-bd75-d7485ee9b291 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.587809] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed38a334-54c5-4ce8-a008-298fceb5cd7f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.843038] env[62383]: DEBUG oslo_concurrency.lockutils [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.314s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.846029] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 22.778s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.870254] env[62383]: INFO nova.scheduler.client.report [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Deleted allocations for instance dd0ad4e3-a6e6-4258-b960-544984e24ebc [ 631.918757] env[62383]: DEBUG nova.compute.manager [req-90f1eac0-f6cb-40d6-a0d4-b2543a6085ad req-860a084b-f3aa-4185-8af1-022ef21552b8 service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Received event network-vif-plugged-3c2cbd45-1a44-495a-bfe1-6e6f90985ded {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 631.918972] env[62383]: DEBUG oslo_concurrency.lockutils [req-90f1eac0-f6cb-40d6-a0d4-b2543a6085ad req-860a084b-f3aa-4185-8af1-022ef21552b8 service nova] Acquiring lock "152567ba-f24c-4674-b06e-98c76a3da324-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
631.919311] env[62383]: DEBUG oslo_concurrency.lockutils [req-90f1eac0-f6cb-40d6-a0d4-b2543a6085ad req-860a084b-f3aa-4185-8af1-022ef21552b8 service nova] Lock "152567ba-f24c-4674-b06e-98c76a3da324-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.919651] env[62383]: DEBUG oslo_concurrency.lockutils [req-90f1eac0-f6cb-40d6-a0d4-b2543a6085ad req-860a084b-f3aa-4185-8af1-022ef21552b8 service nova] Lock "152567ba-f24c-4674-b06e-98c76a3da324-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.919922] env[62383]: DEBUG nova.compute.manager [req-90f1eac0-f6cb-40d6-a0d4-b2543a6085ad req-860a084b-f3aa-4185-8af1-022ef21552b8 service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] No waiting events found dispatching network-vif-plugged-3c2cbd45-1a44-495a-bfe1-6e6f90985ded {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 631.920626] env[62383]: WARNING nova.compute.manager [req-90f1eac0-f6cb-40d6-a0d4-b2543a6085ad req-860a084b-f3aa-4185-8af1-022ef21552b8 service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Received unexpected event network-vif-plugged-3c2cbd45-1a44-495a-bfe1-6e6f90985ded for instance with vm_state building and task_state spawning. [ 632.000107] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451195, 'name': Rename_Task, 'duration_secs': 0.192615} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.000422] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 632.000667] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ecae959-20ca-403b-b04a-8d42b859f065 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.007818] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 632.007818] env[62383]: value = "task-2451196" [ 632.007818] env[62383]: _type = "Task" [ 632.007818] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.016085] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451196, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.024679] env[62383]: DEBUG oslo_vmware.api [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451193, 'name': PowerOnVM_Task, 'duration_secs': 0.725364} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.024949] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 632.025186] env[62383]: INFO nova.compute.manager [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Took 10.34 seconds to spawn the instance on the hypervisor. [ 632.025394] env[62383]: DEBUG nova.compute.manager [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 632.026596] env[62383]: DEBUG nova.network.neutron [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Successfully updated port: 3c2cbd45-1a44-495a-bfe1-6e6f90985ded {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 632.028576] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cb6a17-23bd-4268-80ba-eabd16283a20 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.058273] env[62383]: DEBUG nova.network.neutron [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.322537] env[62383]: DEBUG oslo_vmware.rw_handles [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0d1e-9584-1a00-e8fd-a95ea92fb263/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 632.323551] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3497116-ced0-4bdc-b46f-3272fcb8bea7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.329713] env[62383]: DEBUG oslo_vmware.rw_handles [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0d1e-9584-1a00-e8fd-a95ea92fb263/disk-0.vmdk is in state: ready. 
{{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 632.329895] env[62383]: ERROR oslo_vmware.rw_handles [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0d1e-9584-1a00-e8fd-a95ea92fb263/disk-0.vmdk due to incomplete transfer. [ 632.330296] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1be2359a-ae7e-4bbf-9c2d-dedc99ae3d54 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.337918] env[62383]: DEBUG oslo_vmware.rw_handles [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0d1e-9584-1a00-e8fd-a95ea92fb263/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 632.338137] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Uploaded image 7f496ddb-c953-495b-950c-a9868bc5e78a to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 632.340116] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 632.340367] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e97c52e5-9813-433e-b3fc-edea93ac9281 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.346415] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 632.346415] env[62383]: value = "task-2451197" [ 632.346415] env[62383]: _type = "Task" [ 632.346415] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.357957] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451197, 'name': Destroy_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.378696] env[62383]: DEBUG oslo_concurrency.lockutils [None req-79b2c999-4155-4298-9039-3313681f9fbd tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "dd0ad4e3-a6e6-4258-b960-544984e24ebc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.618s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.520206] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451196, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.532588] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.532830] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.532988] env[62383]: DEBUG nova.network.neutron [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.550753] env[62383]: INFO nova.compute.manager [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Took 38.58 seconds to build instance. [ 632.561645] env[62383]: INFO nova.compute.manager [-] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Took 1.03 seconds to deallocate network for instance. [ 632.855941] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451197, 'name': Destroy_Task, 'duration_secs': 0.34867} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.857052] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Destroyed the VM [ 632.857372] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 632.858304] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9688a2-4428-43d9-a1aa-318042fa09bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.860731] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bfae3884-261a-4df0-a91b-6f461c50421d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.868942] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af5951f-fe43-4d37-9945-5f33f393cf4e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.872078] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 632.872078] env[62383]: value = "task-2451198" [ 632.872078] env[62383]: _type = "Task" [ 632.872078] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.900797] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd4a400-5fd9-47ad-a247-66e61ff52d96 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.906885] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451198, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.911810] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8852b68-3368-4916-ab72-d8a1fecf86f6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.925399] env[62383]: DEBUG nova.compute.provider_tree [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.018258] env[62383]: DEBUG oslo_vmware.api [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451196, 'name': PowerOnVM_Task, 'duration_secs': 0.80419} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.018602] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 633.018913] env[62383]: INFO nova.compute.manager [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Took 6.38 seconds to spawn the instance on the hypervisor. [ 633.019055] env[62383]: DEBUG nova.compute.manager [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 633.019846] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1961da-014c-4b7c-8ea5-5ce156d558f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.054288] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9c93d7e4-d54a-4e11-a18c-8c2c21fbcae6 tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "60535a30-4602-4063-94a4-30ed01266d5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.723s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.067015] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.079331] env[62383]: DEBUG nova.network.neutron [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Instance cache 
missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.230907] env[62383]: DEBUG nova.network.neutron [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Updating instance_info_cache with network_info: [{"id": "3c2cbd45-1a44-495a-bfe1-6e6f90985ded", "address": "fa:16:3e:2a:51:61", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c2cbd45-1a", "ovs_interfaceid": "3c2cbd45-1a44-495a-bfe1-6e6f90985ded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.383037] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451198, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.428262] env[62383]: DEBUG nova.scheduler.client.report [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 633.539134] env[62383]: INFO nova.compute.manager [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Took 30.90 seconds to build instance. [ 633.558037] env[62383]: DEBUG nova.compute.manager [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 633.582926] env[62383]: DEBUG oslo_concurrency.lockutils [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "8e911bad-5408-4588-9865-912ce4457d34" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.583352] env[62383]: DEBUG oslo_concurrency.lockutils [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "8e911bad-5408-4588-9865-912ce4457d34" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.583649] env[62383]: DEBUG oslo_concurrency.lockutils [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "8e911bad-5408-4588-9865-912ce4457d34-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.583921] env[62383]: DEBUG oslo_concurrency.lockutils [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "8e911bad-5408-4588-9865-912ce4457d34-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.584860] env[62383]: DEBUG oslo_concurrency.lockutils [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "8e911bad-5408-4588-9865-912ce4457d34-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.586986] env[62383]: INFO nova.compute.manager [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Terminating instance [ 633.708046] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "60535a30-4602-4063-94a4-30ed01266d5b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.708303] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "60535a30-4602-4063-94a4-30ed01266d5b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: 
waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.708560] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "60535a30-4602-4063-94a4-30ed01266d5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.708765] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "60535a30-4602-4063-94a4-30ed01266d5b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.708936] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "60535a30-4602-4063-94a4-30ed01266d5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.711025] env[62383]: INFO nova.compute.manager [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Terminating instance [ 633.733421] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.733746] env[62383]: DEBUG nova.compute.manager [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Instance network_info: |[{"id": "3c2cbd45-1a44-495a-bfe1-6e6f90985ded", "address": "fa:16:3e:2a:51:61", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c2cbd45-1a", "ovs_interfaceid": "3c2cbd45-1a44-495a-bfe1-6e6f90985ded", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 633.734169] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:51:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c2cbd45-1a44-495a-bfe1-6e6f90985ded', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 633.742203] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Creating folder: Project (5bed29fa2bc64a31b3324d7d0d01c61d). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.742932] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c722a91-439b-496d-9513-97655f6766b3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.753432] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Created folder: Project (5bed29fa2bc64a31b3324d7d0d01c61d) in parent group-v496304. [ 633.753628] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Creating folder: Instances. Parent ref: group-v496381. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.753858] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-623365d3-51b0-4bbc-8191-3d21d03528e7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.762846] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Created folder: Instances in parent group-v496381. [ 633.762846] env[62383]: DEBUG oslo.service.loopingcall [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 633.762846] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 633.762846] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd350d18-b683-4669-b6c2-2d9ddeb1a913 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.783141] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 633.783141] env[62383]: value = "task-2451201" [ 633.783141] env[62383]: _type = "Task" [ 633.783141] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.791657] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451201, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.882509] env[62383]: DEBUG oslo_vmware.api [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451198, 'name': RemoveSnapshot_Task, 'duration_secs': 0.647319} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.882857] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 633.883155] env[62383]: INFO nova.compute.manager [None req-679d61d0-2150-4182-a78b-fb29db8e797e tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Took 15.43 seconds to snapshot the instance on the hypervisor. [ 633.981215] env[62383]: DEBUG nova.compute.manager [req-524cbf86-bad0-4155-9429-619d274372c3 req-d3ee025a-7410-420c-935e-7d10433f425f service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Received event network-changed-3c2cbd45-1a44-495a-bfe1-6e6f90985ded {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 633.981432] env[62383]: DEBUG nova.compute.manager [req-524cbf86-bad0-4155-9429-619d274372c3 req-d3ee025a-7410-420c-935e-7d10433f425f service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Refreshing instance network info cache due to event network-changed-3c2cbd45-1a44-495a-bfe1-6e6f90985ded. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 633.981651] env[62383]: DEBUG oslo_concurrency.lockutils [req-524cbf86-bad0-4155-9429-619d274372c3 req-d3ee025a-7410-420c-935e-7d10433f425f service nova] Acquiring lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.981795] env[62383]: DEBUG oslo_concurrency.lockutils [req-524cbf86-bad0-4155-9429-619d274372c3 req-d3ee025a-7410-420c-935e-7d10433f425f service nova] Acquired lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.982011] env[62383]: DEBUG nova.network.neutron [req-524cbf86-bad0-4155-9429-619d274372c3 req-d3ee025a-7410-420c-935e-7d10433f425f service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Refreshing network info cache for port 3c2cbd45-1a44-495a-bfe1-6e6f90985ded {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 634.042308] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12e0f806-f300-4629-bc02-d8e2fa90592f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "045e5f8f-edd5-425d-bccb-054d90db27d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 57.254s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.086274] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.091679] env[62383]: DEBUG nova.compute.manager [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 634.091886] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 634.093016] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf2ba50-1d28-46a2-ac21-d9176b373ce7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.102121] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 634.102378] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad232b1a-c98c-4396-9f63-5f6e0988b977 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.108749] env[62383]: DEBUG oslo_vmware.api [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 634.108749] env[62383]: value = "task-2451202" [ 634.108749] env[62383]: _type = "Task" [ 634.108749] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.117277] env[62383]: DEBUG oslo_vmware.api [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451202, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.214789] env[62383]: DEBUG nova.compute.manager [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 634.215146] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 634.216514] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb69aa49-6e97-41d2-843d-9f01b2a49ab1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.225137] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 634.225375] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73901431-8541-4f95-ab9e-75af5a4c2fca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.231446] env[62383]: DEBUG oslo_vmware.api [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 634.231446] env[62383]: value = "task-2451203" [ 634.231446] env[62383]: _type = "Task" [ 634.231446] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.239203] env[62383]: DEBUG oslo_vmware.api [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451203, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.293040] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451201, 'name': CreateVM_Task, 'duration_secs': 0.391333} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.293238] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 634.293943] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.294133] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.294470] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 634.294750] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6248c1ca-3bba-4b91-9cc5-9b537c30e018 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.299806] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 634.299806] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]523f8f33-95b9-89fe-df99-82ac78a0781e" [ 634.299806] env[62383]: _type = "Task" [ 634.299806] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.308293] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523f8f33-95b9-89fe-df99-82ac78a0781e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.440698] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.595s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.443647] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.740s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.446033] env[62383]: INFO nova.compute.claims [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 634.545105] env[62383]: DEBUG nova.compute.manager [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.573464] env[62383]: INFO nova.compute.manager [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Rebuilding instance [ 634.619142] env[62383]: DEBUG oslo_vmware.api [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451202, 'name': PowerOffVM_Task, 'duration_secs': 0.313571} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.621642] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 634.621695] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 634.622088] env[62383]: DEBUG nova.compute.manager [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 634.622363] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f1cc042-1479-420a-9bef-51f66bd723e1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.624361] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a2cabb-dfd4-43f6-9adc-0df611d8d39f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.693947] env[62383]: DEBUG nova.network.neutron [req-524cbf86-bad0-4155-9429-619d274372c3 req-d3ee025a-7410-420c-935e-7d10433f425f service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Updated VIF entry in instance network info cache for port 3c2cbd45-1a44-495a-bfe1-6e6f90985ded. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 634.694373] env[62383]: DEBUG nova.network.neutron [req-524cbf86-bad0-4155-9429-619d274372c3 req-d3ee025a-7410-420c-935e-7d10433f425f service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Updating instance_info_cache with network_info: [{"id": "3c2cbd45-1a44-495a-bfe1-6e6f90985ded", "address": "fa:16:3e:2a:51:61", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c2cbd45-1a", "ovs_interfaceid": "3c2cbd45-1a44-495a-bfe1-6e6f90985ded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.698872] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 634.699105] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 634.699300] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Deleting the datastore file [datastore2] 8e911bad-5408-4588-9865-912ce4457d34 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 634.699543] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c7f60a0-55e1-41dc-bb8b-e0cfaa1acf0c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.706031] env[62383]: DEBUG oslo_vmware.api [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for the task: (returnval){ [ 634.706031] env[62383]: value = "task-2451205" [ 634.706031] env[62383]: _type = "Task" [ 634.706031] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.714405] env[62383]: DEBUG oslo_vmware.api [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451205, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.740929] env[62383]: DEBUG oslo_vmware.api [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451203, 'name': PowerOffVM_Task, 'duration_secs': 0.238938} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.741199] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 634.741389] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 634.741600] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e83c7193-ad57-46a6-82fc-b16672b2b53f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.813616] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523f8f33-95b9-89fe-df99-82ac78a0781e, 'name': SearchDatastore_Task, 'duration_secs': 0.012797} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.813876] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.814123] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 634.814365] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.814526] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.814784] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 634.814999] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c103ebc-fbe3-43da-bd8e-995e4caa7158 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.823328] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 634.823506] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 634.824246] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84c7e122-5672-4518-af62-395993c996da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.829306] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 634.829306] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ccd43b-a903-5ef4-9d88-8bd3142cad93" [ 634.829306] env[62383]: _type = "Task" [ 634.829306] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.836845] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ccd43b-a903-5ef4-9d88-8bd3142cad93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.006771] env[62383]: INFO nova.scheduler.client.report [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Deleted allocation for migration e6999db4-45d8-4db8-bb73-e60e42a7ccfb [ 635.070770] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.197250] env[62383]: DEBUG oslo_concurrency.lockutils [req-524cbf86-bad0-4155-9429-619d274372c3 req-d3ee025a-7410-420c-935e-7d10433f425f service nova] Releasing lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.218032] env[62383]: DEBUG oslo_vmware.api [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Task: {'id': task-2451205, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150661} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.218251] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 635.219048] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 635.219135] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 635.219515] env[62383]: INFO nova.compute.manager [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Took 1.13 seconds to destroy the instance on the hypervisor. [ 635.220676] env[62383]: DEBUG oslo.service.loopingcall [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 635.221557] env[62383]: DEBUG nova.compute.manager [-] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 635.222818] env[62383]: DEBUG nova.network.neutron [-] [instance: 8e911bad-5408-4588-9865-912ce4457d34] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.343043] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ccd43b-a903-5ef4-9d88-8bd3142cad93, 'name': SearchDatastore_Task, 'duration_secs': 0.008473} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.343043] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28fe7f2c-c8af-4bc9-b836-2443657dadd6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.348752] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 635.348752] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a79e92-4384-a90c-d9dd-ec8e1090a7bc" [ 635.348752] env[62383]: _type = "Task" [ 635.348752] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.360021] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a79e92-4384-a90c-d9dd-ec8e1090a7bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.519350] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee4eb599-8c66-4057-9121-f90d8b2250ab tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 29.390s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.640950] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 635.642512] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b18357d-681a-481c-a36b-833476e99010 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.649676] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 635.649676] env[62383]: value = "task-2451207" [ 635.649676] env[62383]: _type = "Task" [ 635.649676] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.662745] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451207, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.860599] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a79e92-4384-a90c-d9dd-ec8e1090a7bc, 'name': SearchDatastore_Task, 'duration_secs': 0.013631} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.860889] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.861173] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 152567ba-f24c-4674-b06e-98c76a3da324/152567ba-f24c-4674-b06e-98c76a3da324.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 635.861437] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-748bf528-9f96-412d-930a-61bfab7e7b31 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.869966] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 635.869966] env[62383]: value = "task-2451208" [ 635.869966] env[62383]: _type = "Task" [ 635.869966] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.881733] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451208, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.019502] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b642b0-bfe7-429a-b5bf-feed8b7c829f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.027678] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad111887-b2af-48b0-a7ed-5108082c8bd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.059032] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12803710-42f4-4965-a0f3-04dd4f3d458e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.064710] env[62383]: DEBUG nova.compute.manager [req-3c720145-e666-4d72-a65f-4a725408e6e8 req-f28b69e7-33fd-44d0-9c01-7f21c216e39a service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Received event network-vif-deleted-f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 636.065021] env[62383]: INFO nova.compute.manager [req-3c720145-e666-4d72-a65f-4a725408e6e8 req-f28b69e7-33fd-44d0-9c01-7f21c216e39a service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Neutron deleted interface f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e; detaching it from the instance and deleting it from the info cache [ 636.065136] env[62383]: DEBUG nova.network.neutron [req-3c720145-e666-4d72-a65f-4a725408e6e8 req-f28b69e7-33fd-44d0-9c01-7f21c216e39a service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.072416] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7094f716-b6d2-4e75-9438-8d11d25e9594 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.088203] env[62383]: DEBUG nova.compute.provider_tree [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.160175] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451207, 'name': PowerOffVM_Task, 'duration_secs': 0.268286} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.160598] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 636.160930] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 636.162022] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300f36a0-b398-45e2-bfda-4a0fec855c44 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.165491] env[62383]: DEBUG nova.network.neutron [-] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.169535] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 636.169714] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50d64b78-fd0f-4893-931b-ff902b941b60 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.197833] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 636.198112] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 636.198368] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Deleting the datastore file [datastore2] 045e5f8f-edd5-425d-bccb-054d90db27d9 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 636.198712] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3c8ae3a-c460-4612-8187-2f500f70151d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.207342] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 636.207342] env[62383]: value = "task-2451210" [ 
636.207342] env[62383]: _type = "Task" [ 636.207342] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.218306] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.332743] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 636.332961] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 636.333162] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Deleting the datastore file [datastore1] 60535a30-4602-4063-94a4-30ed01266d5b {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 636.333429] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1113d405-a7ca-4092-ab37-6c4509e32cd1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.340198] env[62383]: DEBUG oslo_vmware.api [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for the task: (returnval){ [ 636.340198] env[62383]: value = "task-2451211" [ 636.340198] env[62383]: _type = "Task" [ 636.340198] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.348186] env[62383]: DEBUG oslo_vmware.api [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451211, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.379449] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451208, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.462056] env[62383]: DEBUG nova.compute.manager [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 636.462360] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ad9dc1-6397-4273-9cde-cc567a6b1148 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.568291] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3629608b-e540-49fe-be0e-85ceac3f77aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.576705] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174e506e-9e08-426a-8470-4e28e342c4bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.591037] env[62383]: DEBUG nova.scheduler.client.report [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 636.606812] env[62383]: DEBUG nova.compute.manager [req-3c720145-e666-4d72-a65f-4a725408e6e8 req-f28b69e7-33fd-44d0-9c01-7f21c216e39a service nova] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Detach interface failed, port_id=f6fa0d52-c6b1-4cdc-8ab2-fafcc94df37e, reason: Instance 8e911bad-5408-4588-9865-912ce4457d34 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 636.670977] env[62383]: INFO nova.compute.manager [-] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Took 1.45 seconds to deallocate network for instance. [ 636.718147] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235065} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.718396] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 636.718612] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 636.718857] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 636.850249] env[62383]: DEBUG oslo_vmware.api [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Task: {'id': task-2451211, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182429} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.850249] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 636.850407] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 636.850524] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 636.850697] env[62383]: INFO nova.compute.manager [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Took 2.64 seconds to destroy the instance on the hypervisor. [ 636.850938] env[62383]: DEBUG oslo.service.loopingcall [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 636.851133] env[62383]: DEBUG nova.compute.manager [-] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 636.851227] env[62383]: DEBUG nova.network.neutron [-] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 636.881583] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512738} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.884581] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 152567ba-f24c-4674-b06e-98c76a3da324/152567ba-f24c-4674-b06e-98c76a3da324.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 636.884719] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 636.885009] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1a7d4d6-a0f4-4264-b6fa-458eac011253 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.892258] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 636.892258] env[62383]: value = "task-2451212" [ 636.892258] env[62383]: _type = "Task" [ 636.892258] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.903663] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451212, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.907937] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "9604eadf-a027-46dd-989b-0d4b752f883a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.908190] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "9604eadf-a027-46dd-989b-0d4b752f883a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.973729] env[62383]: INFO nova.compute.manager [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] instance snapshotting [ 636.977050] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3124a32d-dc76-4112-ab59-066745d969b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.995645] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b717b86-1d6f-4793-b413-fd130d659a19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.097295] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.098079] env[62383]: DEBUG nova.compute.manager [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 637.101308] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.883s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.103698] env[62383]: INFO nova.compute.claims [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.178688] env[62383]: DEBUG oslo_concurrency.lockutils [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.402188] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451212, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07191} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.402528] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 637.403317] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8507d364-cf48-4aef-9c44-99cfdac7be49 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.425087] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 152567ba-f24c-4674-b06e-98c76a3da324/152567ba-f24c-4674-b06e-98c76a3da324.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 637.425644] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bce7e8dd-7d4d-43a5-94a1-3f214ee20a8e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.444879] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 637.444879] env[62383]: value = "task-2451213" [ 637.444879] env[62383]: _type = "Task" [ 637.444879] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.454130] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451213, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.506655] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 637.506942] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b0c3a4ec-9bc1-4b18-a9ad-cc704653e4e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.514058] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 637.514058] env[62383]: value = "task-2451214" [ 637.514058] env[62383]: _type = "Task" [ 637.514058] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.522114] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451214, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.585774] env[62383]: DEBUG nova.network.neutron [-] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.610598] env[62383]: DEBUG nova.compute.utils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 637.614060] env[62383]: DEBUG nova.compute.manager [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 637.614238] env[62383]: DEBUG nova.network.neutron [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 637.663399] env[62383]: DEBUG nova.policy [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '671a6c1983c64c26b3ea501f171045d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5bed29fa2bc64a31b3324d7d0d01c61d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 637.749941] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 637.750235] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.750391] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.750573] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.750719] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.750862] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 
tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 637.751082] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 637.751244] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 637.751410] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 637.751571] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 637.751745] env[62383]: DEBUG nova.virt.hardware [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 637.752633] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f05c7f-a2a1-4ad6-8f22-00a64773a6dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.761109] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6198ea18-d5f3-402a-aaa1-f71eaf4bb5a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.775188] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 637.780805] env[62383]: DEBUG oslo.service.loopingcall [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 637.781077] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 637.781289] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05146e67-c433-4fdf-9ac6-8de6eccfaca0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.797687] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 637.797687] env[62383]: value = "task-2451215" [ 637.797687] env[62383]: _type = "Task" [ 637.797687] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.805535] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451215, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.949580] env[62383]: DEBUG nova.network.neutron [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Successfully created port: e04396ac-7097-4a6c-8e34-1a92f30eb36a {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.959017] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451213, 'name': ReconfigVM_Task, 'duration_secs': 0.296066} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.959248] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 152567ba-f24c-4674-b06e-98c76a3da324/152567ba-f24c-4674-b06e-98c76a3da324.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 637.960253] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74fdd9a6-0420-4197-88f0-4c20e146c4c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.967127] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 637.967127] env[62383]: value = "task-2451216" [ 637.967127] env[62383]: _type = "Task" [ 637.967127] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.975844] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451216, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.025209] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451214, 'name': CreateSnapshot_Task, 'duration_secs': 0.504164} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.025516] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 638.026306] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd16caf-7eae-4ee6-884a-8727d3092c98 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.088861] env[62383]: INFO nova.compute.manager [-] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Took 1.24 seconds to deallocate network for instance. [ 638.097138] env[62383]: DEBUG nova.compute.manager [req-a59d8011-5925-444a-8f68-7d218fd0d2ef req-a747bf53-7ae6-4e62-b3d1-09d9f19d9dd9 service nova] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Received event network-vif-deleted-f2203dee-5264-47c9-93de-7653ae3131a8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 638.114677] env[62383]: DEBUG nova.compute.manager [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 638.307265] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451215, 'name': CreateVM_Task, 'duration_secs': 0.294686} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.309592] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 638.310204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.310406] env[62383]: DEBUG oslo_concurrency.lockutils [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.310768] env[62383]: DEBUG oslo_concurrency.lockutils [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 638.311198] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08ca32cd-35ec-4d1a-9c3a-d24d38bb56c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.315905] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 638.315905] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e7e326-2197-b98d-5d87-5e7c37c53eac" [ 638.315905] env[62383]: _type = "Task" [ 638.315905] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.326383] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e7e326-2197-b98d-5d87-5e7c37c53eac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.477087] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451216, 'name': Rename_Task, 'duration_secs': 0.172093} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.479387] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 638.479804] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee0cad34-6392-47e2-a5d5-b676942759f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.486774] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 638.486774] env[62383]: value = "task-2451217" [ 638.486774] env[62383]: _type = "Task" [ 638.486774] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.496479] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451217, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.545124] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 638.545401] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ad8b99ea-fb1d-455f-bf8d-739c6f2c9ed3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.555824] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 638.555824] env[62383]: value = "task-2451218" [ 638.555824] env[62383]: _type = "Task" [ 638.555824] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.563754] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451218, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.590654] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819f8744-ee2a-415b-9596-a1957a1fea1c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.599207] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.600390] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f58387-6cf7-413b-8b4a-9e130194e922 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.633661] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b38e1a3-7428-4b78-83b8-d29021f23593 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.641597] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8feb5ac-596b-4ff9-a7b4-eb74784fd02d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.655581] env[62383]: DEBUG nova.compute.provider_tree [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.827076] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e7e326-2197-b98d-5d87-5e7c37c53eac, 'name': SearchDatastore_Task, 'duration_secs': 0.012572} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.827215] env[62383]: DEBUG oslo_concurrency.lockutils [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.827392] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 638.827626] env[62383]: DEBUG oslo_concurrency.lockutils [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.827804] env[62383]: DEBUG oslo_concurrency.lockutils [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.827995] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 638.828266] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa681a47-d775-41e7-94ad-017dc6ca2d93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.836896] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 638.837111] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 638.837889] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05d76403-cc41-44a1-8421-34c095439f3a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.846397] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 638.846397] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52094121-1e3d-713d-2cd1-2987591939ed" [ 638.846397] env[62383]: _type = "Task" [ 638.846397] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.854311] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52094121-1e3d-713d-2cd1-2987591939ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.996538] env[62383]: DEBUG oslo_vmware.api [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451217, 'name': PowerOnVM_Task, 'duration_secs': 0.456426} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.996800] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 638.997012] env[62383]: INFO nova.compute.manager [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Took 7.45 seconds to spawn the instance on the hypervisor. [ 638.997205] env[62383]: DEBUG nova.compute.manager [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 638.997955] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55712ef5-cd21-4da8-9f7f-fe6227e397a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.066198] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451218, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.138323] env[62383]: DEBUG nova.compute.manager [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 639.159998] env[62383]: DEBUG nova.scheduler.client.report [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 639.165997] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 639.166631] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.166631] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 639.166631] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.166779] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Image pref 0:0:0 {{(pid=62383) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 639.166907] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 639.167422] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 639.167604] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 639.167698] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 639.167894] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 639.168043] env[62383]: DEBUG nova.virt.hardware [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 639.168925] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629b5c04-c756-411f-91be-ff01744a00be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.177928] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66aa0495-9a6a-4aba-8a4d-cd0b10cfa0c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.357268] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52094121-1e3d-713d-2cd1-2987591939ed, 'name': SearchDatastore_Task, 'duration_secs': 0.010363} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.358208] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ed7e1ba-ad55-4aa1-be6a-056ef5a890e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.363866] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 639.363866] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520cdb90-fed9-a7c4-0d72-ca11735b74c2" [ 639.363866] env[62383]: _type = "Task" [ 639.363866] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.373307] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520cdb90-fed9-a7c4-0d72-ca11735b74c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.468172] env[62383]: DEBUG nova.network.neutron [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Successfully updated port: e04396ac-7097-4a6c-8e34-1a92f30eb36a {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 639.514092] env[62383]: INFO nova.compute.manager [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Took 35.79 seconds to build instance. [ 639.567102] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451218, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.665497] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.666037] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 639.668677] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.592s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.668859] env[62383]: DEBUG nova.objects.instance [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 639.874566] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520cdb90-fed9-a7c4-0d72-ca11735b74c2, 'name': SearchDatastore_Task, 'duration_secs': 0.017392} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.874856] env[62383]: DEBUG oslo_concurrency.lockutils [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.875138] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 045e5f8f-edd5-425d-bccb-054d90db27d9/045e5f8f-edd5-425d-bccb-054d90db27d9.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 639.875838] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ac54510-b996-4bd6-b90b-13b6e160fe85 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.881578] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 639.881578] env[62383]: value = "task-2451219" [ 639.881578] env[62383]: _type = "Task" [ 639.881578] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.891596] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451219, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.970060] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "refresh_cache-c2fee51e-3cc9-421c-bfe5-b324a5b14197" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.970232] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "refresh_cache-c2fee51e-3cc9-421c-bfe5-b324a5b14197" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.970387] env[62383]: DEBUG nova.network.neutron [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 640.016151] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e3a4721-431a-49be-b22e-09845bd3028f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "152567ba-f24c-4674-b06e-98c76a3da324" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.917s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.069039] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451218, 'name': CloneVM_Task, 'duration_secs': 1.35307} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.069456] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Created linked-clone VM from snapshot [ 640.070566] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c63f49-dd8a-4427-8b6e-c9bcf0a02adb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.078716] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Uploading image 83dcd21b-ba65-413d-a25a-06168574d922 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 640.101032] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 640.101032] env[62383]: value = "vm-496386" [ 640.101032] env[62383]: _type = "VirtualMachine" [ 640.101032] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 640.101032] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e701df2d-3eba-4600-9764-dee84a81892a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.108542] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lease: (returnval){ [ 640.108542] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f20a4b-ee5a-85ae-d298-91f948d481e6" [ 640.108542] env[62383]: _type = "HttpNfcLease" [ 640.108542] env[62383]: } obtained for exporting VM: (result){ [ 640.108542] env[62383]: value = "vm-496386" [ 640.108542] env[62383]: _type = "VirtualMachine" [ 640.108542] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 640.108885] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the lease: (returnval){ [ 640.108885] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f20a4b-ee5a-85ae-d298-91f948d481e6" [ 640.108885] env[62383]: _type = "HttpNfcLease" [ 640.108885] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 640.119021] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 640.119021] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f20a4b-ee5a-85ae-d298-91f948d481e6" [ 640.119021] env[62383]: _type = "HttpNfcLease" [ 640.119021] env[62383]: } is initializing. 
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 640.144052] env[62383]: DEBUG nova.compute.manager [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Received event network-vif-plugged-e04396ac-7097-4a6c-8e34-1a92f30eb36a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 640.144294] env[62383]: DEBUG oslo_concurrency.lockutils [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] Acquiring lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.144507] env[62383]: DEBUG oslo_concurrency.lockutils [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] Lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.144638] env[62383]: DEBUG oslo_concurrency.lockutils [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] Lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.144798] env[62383]: DEBUG nova.compute.manager [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] No waiting events found dispatching network-vif-plugged-e04396ac-7097-4a6c-8e34-1a92f30eb36a {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 640.144964] env[62383]: WARNING nova.compute.manager [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Received unexpected event network-vif-plugged-e04396ac-7097-4a6c-8e34-1a92f30eb36a for instance with vm_state building and task_state spawning. [ 640.145254] env[62383]: DEBUG nova.compute.manager [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Received event network-changed-e04396ac-7097-4a6c-8e34-1a92f30eb36a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 640.145555] env[62383]: DEBUG nova.compute.manager [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Refreshing instance network info cache due to event network-changed-e04396ac-7097-4a6c-8e34-1a92f30eb36a. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 640.145792] env[62383]: DEBUG oslo_concurrency.lockutils [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] Acquiring lock "refresh_cache-c2fee51e-3cc9-421c-bfe5-b324a5b14197" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.174133] env[62383]: DEBUG nova.compute.utils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 640.178020] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 640.178020] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 640.226517] env[62383]: DEBUG nova.policy [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bce611334e8242a1bcce747201fde0ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4aa6ead625714631984584d0292b4c40', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 640.393495] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451219, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.506961] env[62383]: DEBUG nova.network.neutron [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.518721] env[62383]: DEBUG nova.compute.manager [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 640.618418] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 640.618418] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f20a4b-ee5a-85ae-d298-91f948d481e6" [ 640.618418] env[62383]: _type = "HttpNfcLease" [ 640.618418] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 640.621016] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 640.621016] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f20a4b-ee5a-85ae-d298-91f948d481e6" [ 640.621016] env[62383]: _type = "HttpNfcLease" [ 640.621016] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 640.621016] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5207322-47c0-431e-99bf-e65d3b641d3f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.633337] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52219b84-331a-93fb-1981-d5c4bbea0452/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 640.633668] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52219b84-331a-93fb-1981-d5c4bbea0452/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 640.718246] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 640.721626] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f6f12bc2-ba4c-40c6-bd13-b12dc27d3c19 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.053s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.725661] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Successfully created port: 7d1b1f36-5137-4415-a93a-9ca34e00706d {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 640.732211] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.733s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.733564] env[62383]: DEBUG nova.objects.instance [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lazy-loading 'resources' on Instance uuid 184d0caa-85c2-426d-82e5-ac52e525fe74 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 640.765355] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b2bd150a-dd6d-4981-9609-d8f18a5ce1a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.849447] env[62383]: DEBUG nova.network.neutron [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Updating instance_info_cache with network_info: [{"id": "e04396ac-7097-4a6c-8e34-1a92f30eb36a", "address": "fa:16:3e:e9:34:d6", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape04396ac-70", "ovs_interfaceid": "e04396ac-7097-4a6c-8e34-1a92f30eb36a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.892340] 
env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.791977} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.892504] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 045e5f8f-edd5-425d-bccb-054d90db27d9/045e5f8f-edd5-425d-bccb-054d90db27d9.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.892710] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.892961] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ebb8546c-3ac0-4188-a90e-c4f17ae6fdf2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.900652] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 640.900652] env[62383]: value = "task-2451221" [ 640.900652] env[62383]: _type = "Task" [ 640.900652] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.909849] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451221, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.047360] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.351206] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "refresh_cache-c2fee51e-3cc9-421c-bfe5-b324a5b14197" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.351626] env[62383]: DEBUG nova.compute.manager [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Instance network_info: |[{"id": "e04396ac-7097-4a6c-8e34-1a92f30eb36a", "address": "fa:16:3e:e9:34:d6", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape04396ac-70", "ovs_interfaceid": "e04396ac-7097-4a6c-8e34-1a92f30eb36a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 641.351945] env[62383]: DEBUG oslo_concurrency.lockutils [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] Acquired lock "refresh_cache-c2fee51e-3cc9-421c-bfe5-b324a5b14197" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.352239] env[62383]: DEBUG nova.network.neutron [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Refreshing network info cache for port e04396ac-7097-4a6c-8e34-1a92f30eb36a {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 641.353480] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:34:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e04396ac-7097-4a6c-8e34-1a92f30eb36a', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 641.363226] env[62383]: DEBUG oslo.service.loopingcall [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 641.369546] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 641.370711] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-075a1807-2909-49d5-8f9b-8d02d04bd982 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.394867] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 641.394867] env[62383]: value = "task-2451222" [ 641.394867] env[62383]: _type = "Task" [ 641.394867] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.408841] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451222, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.413550] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451221, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073624} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.416124] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 641.417201] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256ca319-9d68-4526-9216-7139de17157a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.440125] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] 045e5f8f-edd5-425d-bccb-054d90db27d9/045e5f8f-edd5-425d-bccb-054d90db27d9.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 641.445610] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7300247f-efb0-4cdf-b0ba-0cfef94d6dec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.468651] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 641.468651] env[62383]: value = "task-2451223" [ 641.468651] env[62383]: _type = "Task" [ 641.468651] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.482361] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451223, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.743116] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 641.768366] env[62383]: DEBUG nova.network.neutron [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Updated VIF entry in instance network info cache for port e04396ac-7097-4a6c-8e34-1a92f30eb36a. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 641.768425] env[62383]: DEBUG nova.network.neutron [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Updating instance_info_cache with network_info: [{"id": "e04396ac-7097-4a6c-8e34-1a92f30eb36a", "address": "fa:16:3e:e9:34:d6", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape04396ac-70", "ovs_interfaceid": "e04396ac-7097-4a6c-8e34-1a92f30eb36a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.774036] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 641.774036] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 641.774036] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 641.774281] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Flavor pref 
0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 641.774281] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 641.774281] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 641.774281] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 641.774281] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 641.775266] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 641.775479] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 641.775694] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 641.776758] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6040cd8f-9b91-48c7-92b0-b291b74bc4cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.790765] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db52234d-4d1a-47ab-9580-7ab299e4fa81 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.866751] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ac8dbc-2cbd-4bc1-b494-3066a2ed9d86 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.875214] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8dc862b6-8331-40c7-aea5-129649c9da47 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.913762] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec30d7e3-4b07-4fac-a26d-6ee79ff2ce3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.923212] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451222, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.926753] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a26dcd-4bb4-4851-9df5-7bf105992995 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.942050] env[62383]: DEBUG nova.compute.provider_tree [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.981147] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451223, 'name': ReconfigVM_Task, 'duration_secs': 0.316347} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.981541] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Reconfigured VM instance instance-0000001c to attach disk [datastore2] 045e5f8f-edd5-425d-bccb-054d90db27d9/045e5f8f-edd5-425d-bccb-054d90db27d9.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 641.982250] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c321ca22-8d4d-48b9-afaa-76cd9f804cd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.988529] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 641.988529] env[62383]: value = "task-2451224" [ 641.988529] env[62383]: _type = "Task" [ 641.988529] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.996806] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451224, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.271344] env[62383]: DEBUG oslo_concurrency.lockutils [req-5a0dc014-eb45-4e2b-bf0b-e4e18fe2bf35 req-a4bf91c1-be62-49b0-a26e-b17507400c58 service nova] Releasing lock "refresh_cache-c2fee51e-3cc9-421c-bfe5-b324a5b14197" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.349419] env[62383]: DEBUG nova.compute.manager [req-2a06b5bd-845c-475f-a109-f69b9d6970e6 req-16200970-69a8-4b21-b1de-a2a0686feac7 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Received event network-vif-plugged-7d1b1f36-5137-4415-a93a-9ca34e00706d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 642.349957] env[62383]: DEBUG oslo_concurrency.lockutils [req-2a06b5bd-845c-475f-a109-f69b9d6970e6 req-16200970-69a8-4b21-b1de-a2a0686feac7 service nova] Acquiring lock "330b5e35-3292-4df7-b288-547b158e671a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.350209] env[62383]: DEBUG oslo_concurrency.lockutils [req-2a06b5bd-845c-475f-a109-f69b9d6970e6 req-16200970-69a8-4b21-b1de-a2a0686feac7 service nova] Lock "330b5e35-3292-4df7-b288-547b158e671a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.350720] env[62383]: DEBUG oslo_concurrency.lockutils [req-2a06b5bd-845c-475f-a109-f69b9d6970e6 req-16200970-69a8-4b21-b1de-a2a0686feac7 service nova] Lock "330b5e35-3292-4df7-b288-547b158e671a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.350921] env[62383]: DEBUG nova.compute.manager [req-2a06b5bd-845c-475f-a109-f69b9d6970e6 req-16200970-69a8-4b21-b1de-a2a0686feac7 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] No waiting events found dispatching network-vif-plugged-7d1b1f36-5137-4415-a93a-9ca34e00706d {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 642.351129] env[62383]: WARNING nova.compute.manager [req-2a06b5bd-845c-475f-a109-f69b9d6970e6 req-16200970-69a8-4b21-b1de-a2a0686feac7 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Received unexpected event network-vif-plugged-7d1b1f36-5137-4415-a93a-9ca34e00706d for instance with vm_state building and task_state spawning. [ 642.420802] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451222, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.431469] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Successfully updated port: 7d1b1f36-5137-4415-a93a-9ca34e00706d {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 642.445326] env[62383]: DEBUG nova.scheduler.client.report [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 642.498728] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451224, 'name': Rename_Task, 'duration_secs': 0.150043} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.499035] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.499272] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3bc2b756-d1f1-4bb4-a44e-497b4f3a5622 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.505264] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 642.505264] env[62383]: value = "task-2451225" [ 642.505264] env[62383]: _type = "Task" [ 642.505264] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.513929] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451225, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.924398] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451222, 'name': CreateVM_Task, 'duration_secs': 1.398797} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.924641] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 642.925467] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.925698] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.926101] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 642.926465] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77527d61-83b5-4cc1-8c5a-2ac353e5f3f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.931997] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 642.931997] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c4908f-83f8-9260-1549-302a416092a1" [ 642.931997] env[62383]: _type = "Task" [ 642.931997] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.935836] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "refresh_cache-330b5e35-3292-4df7-b288-547b158e671a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.936033] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired lock "refresh_cache-330b5e35-3292-4df7-b288-547b158e671a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.936191] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 642.942693] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c4908f-83f8-9260-1549-302a416092a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.951200] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.219s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.954113] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.739s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.955673] env[62383]: INFO nova.compute.claims [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.979195] env[62383]: INFO nova.scheduler.client.report [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Deleted allocations for instance 184d0caa-85c2-426d-82e5-ac52e525fe74 [ 643.015395] env[62383]: DEBUG oslo_vmware.api [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451225, 'name': PowerOnVM_Task, 'duration_secs': 0.475753} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.015592] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 643.015804] env[62383]: DEBUG nova.compute.manager [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 643.016720] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b74a78-968b-4e17-9a85-d19af0ab1426 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.444892] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c4908f-83f8-9260-1549-302a416092a1, 'name': SearchDatastore_Task, 'duration_secs': 0.014725} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.445214] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.445787] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 643.445787] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.445787] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.446079] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 643.446706] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcaaf0b9-9330-42e0-83c4-95fc651414a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.455166] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 643.455691] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 643.456134] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24eb9801-0e2e-455a-bf7e-f6e6d0bf84dd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.463667] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 643.463667] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5281e1c5-97ee-597f-a778-dc96f9224f2a" [ 643.463667] env[62383]: _type = "Task" [ 643.463667] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.471771] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5281e1c5-97ee-597f-a778-dc96f9224f2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.476515] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.486669] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6558a48a-3638-402b-8d45-3e49b74dfbba tempest-ImagesNegativeTestJSON-454859679 tempest-ImagesNegativeTestJSON-454859679-project-member] Lock "184d0caa-85c2-426d-82e5-ac52e525fe74" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.801s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.536226] env[62383]: DEBUG oslo_concurrency.lockutils [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.617575] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Updating instance_info_cache with network_info: [{"id": "7d1b1f36-5137-4415-a93a-9ca34e00706d", "address": "fa:16:3e:38:b3:f9", "network": {"id": "6d51bcbf-affc-49e9-92e2-e358b0aef6f9", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-252398505-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4aa6ead625714631984584d0292b4c40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d1b1f36-51", "ovs_interfaceid": "7d1b1f36-5137-4415-a93a-9ca34e00706d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.978544] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5281e1c5-97ee-597f-a778-dc96f9224f2a, 'name': SearchDatastore_Task, 'duration_secs': 0.01316} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.979553] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ca98d0f-7313-4cde-83a0-c70c4ab25713 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.984880] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 643.984880] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e406f5-667a-4b1a-50ab-c919a11b2012" [ 643.984880] env[62383]: _type = "Task" [ 643.984880] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.992973] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e406f5-667a-4b1a-50ab-c919a11b2012, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.120197] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Releasing lock "refresh_cache-330b5e35-3292-4df7-b288-547b158e671a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.120530] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Instance network_info: |[{"id": "7d1b1f36-5137-4415-a93a-9ca34e00706d", "address": "fa:16:3e:38:b3:f9", "network": {"id": "6d51bcbf-affc-49e9-92e2-e358b0aef6f9", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-252398505-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4aa6ead625714631984584d0292b4c40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d1b1f36-51", "ovs_interfaceid": "7d1b1f36-5137-4415-a93a-9ca34e00706d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 644.123500] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 
330b5e35-3292-4df7-b288-547b158e671a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:b3:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359850cc-b061-4c9c-a61c-eb42e0f7c359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d1b1f36-5137-4415-a93a-9ca34e00706d', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 644.131876] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Creating folder: Project (4aa6ead625714631984584d0292b4c40). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 644.133420] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b128819b-c2ed-4d92-bd3c-a0f9f225d0da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.146759] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Created folder: Project (4aa6ead625714631984584d0292b4c40) in parent group-v496304. [ 644.146986] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Creating folder: Instances. Parent ref: group-v496388. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 644.147845] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d023da5-14f1-4fb9-97db-3c2c72dad354 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.160331] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Created folder: Instances in parent group-v496388. [ 644.160585] env[62383]: DEBUG oslo.service.loopingcall [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 644.161202] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 644.161933] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54b011f8-c93f-4695-94b7-6aec0cf078e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.183996] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 644.183996] env[62383]: value = "task-2451228" [ 644.183996] env[62383]: _type = "Task" [ 644.183996] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.197629] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451228, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.496312] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e406f5-667a-4b1a-50ab-c919a11b2012, 'name': SearchDatastore_Task, 'duration_secs': 0.027222} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.496748] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.496864] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] c2fee51e-3cc9-421c-bfe5-b324a5b14197/c2fee51e-3cc9-421c-bfe5-b324a5b14197.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 644.497116] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c5b0faa-1c60-485c-8562-713b1073286f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.506738] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 644.506738] env[62383]: value = "task-2451229" [ 644.506738] env[62383]: _type = "Task" [ 644.506738] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.515733] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451229, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.553416] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287cda90-55ab-4dcc-8b3f-5c97a12913e1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.560523] env[62383]: DEBUG nova.compute.manager [req-f44365c4-d19b-4660-a74a-d69979ca685d req-3d3b8ec4-647d-4845-a5db-dddfc513c277 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Received event network-changed-7d1b1f36-5137-4415-a93a-9ca34e00706d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 644.560840] env[62383]: DEBUG nova.compute.manager [req-f44365c4-d19b-4660-a74a-d69979ca685d req-3d3b8ec4-647d-4845-a5db-dddfc513c277 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Refreshing instance network info cache due to event network-changed-7d1b1f36-5137-4415-a93a-9ca34e00706d. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 644.561179] env[62383]: DEBUG oslo_concurrency.lockutils [req-f44365c4-d19b-4660-a74a-d69979ca685d req-3d3b8ec4-647d-4845-a5db-dddfc513c277 service nova] Acquiring lock "refresh_cache-330b5e35-3292-4df7-b288-547b158e671a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.561417] env[62383]: DEBUG oslo_concurrency.lockutils [req-f44365c4-d19b-4660-a74a-d69979ca685d req-3d3b8ec4-647d-4845-a5db-dddfc513c277 service nova] Acquired lock "refresh_cache-330b5e35-3292-4df7-b288-547b158e671a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.561682] env[62383]: DEBUG nova.network.neutron [req-f44365c4-d19b-4660-a74a-d69979ca685d req-3d3b8ec4-647d-4845-a5db-dddfc513c277 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Refreshing network info cache for port 7d1b1f36-5137-4415-a93a-9ca34e00706d {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 644.570153] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d750750-8078-4ae3-bb63-f58203d57384 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.611206] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d37114-c606-4f26-a7c1-52bd27023dde {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.619395] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df54632-2706-4bf1-83f1-17d593592167 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.635886] env[62383]: DEBUG nova.compute.provider_tree [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.694124] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451228, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.735856] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "045e5f8f-edd5-425d-bccb-054d90db27d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.736184] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "045e5f8f-edd5-425d-bccb-054d90db27d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.736421] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "045e5f8f-edd5-425d-bccb-054d90db27d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.736701] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "045e5f8f-edd5-425d-bccb-054d90db27d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.736810] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "045e5f8f-edd5-425d-bccb-054d90db27d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.739418] env[62383]: INFO nova.compute.manager [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Terminating instance [ 645.020128] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451229, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.141207] env[62383]: DEBUG nova.scheduler.client.report [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 645.447809] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451228, 'name': CreateVM_Task, 'duration_secs': 0.52735} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.447809] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 645.447809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.447809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.447809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 645.448116] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bfef5bb-df4f-4a41-a0fa-1f8ec767e64a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.448116] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "refresh_cache-045e5f8f-edd5-425d-bccb-054d90db27d9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.448116] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquired lock "refresh_cache-045e5f8f-edd5-425d-bccb-054d90db27d9" {{(pid=62383) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.448116] env[62383]: DEBUG nova.network.neutron [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 645.448893] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 645.448893] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d13048-158a-8d00-69ff-0b831c170c1d" [ 645.448893] env[62383]: _type = "Task" [ 645.448893] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.467562] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d13048-158a-8d00-69ff-0b831c170c1d, 'name': SearchDatastore_Task, 'duration_secs': 0.044305} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.470554] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.470818] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 645.471288] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.471288] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.471463] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
645.472024] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a037af8-8f3d-4cb5-ae0f-0295498d44c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.483260] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 645.483260] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 645.483547] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38d8b276-d014-4856-a609-577d44dbe355 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.490712] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 645.490712] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]529337c5-9e0d-e701-8967-dfa10ac5be1b" [ 645.490712] env[62383]: _type = "Task" [ 645.490712] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.501901] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529337c5-9e0d-e701-8967-dfa10ac5be1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.515762] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451229, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.795968} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.516113] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] c2fee51e-3cc9-421c-bfe5-b324a5b14197/c2fee51e-3cc9-421c-bfe5-b324a5b14197.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 645.516378] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 645.516639] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df5d48a5-041e-4f1a-9b0b-c050ed1e1f47 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.526138] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 645.526138] env[62383]: value = "task-2451230" [ 645.526138] env[62383]: _type = "Task" [ 645.526138] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.538746] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451230, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.645845] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.646412] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 645.651239] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.219s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.652558] env[62383]: INFO nova.compute.claims [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.677390] env[62383]: DEBUG nova.network.neutron [req-f44365c4-d19b-4660-a74a-d69979ca685d req-3d3b8ec4-647d-4845-a5db-dddfc513c277 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Updated VIF entry in instance network info cache for port 7d1b1f36-5137-4415-a93a-9ca34e00706d. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 645.677775] env[62383]: DEBUG nova.network.neutron [req-f44365c4-d19b-4660-a74a-d69979ca685d req-3d3b8ec4-647d-4845-a5db-dddfc513c277 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Updating instance_info_cache with network_info: [{"id": "7d1b1f36-5137-4415-a93a-9ca34e00706d", "address": "fa:16:3e:38:b3:f9", "network": {"id": "6d51bcbf-affc-49e9-92e2-e358b0aef6f9", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-252398505-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4aa6ead625714631984584d0292b4c40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d1b1f36-51", "ovs_interfaceid": "7d1b1f36-5137-4415-a93a-9ca34e00706d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.970952] env[62383]: DEBUG nova.network.neutron [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 646.005344] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529337c5-9e0d-e701-8967-dfa10ac5be1b, 'name': SearchDatastore_Task, 'duration_secs': 0.011714} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.006341] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b435427f-8a46-4e7f-87d5-890967ec2813 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.013373] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 646.013373] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522c20e0-c70a-39a3-828b-28852e76bebf" [ 646.013373] env[62383]: _type = "Task" [ 646.013373] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.028168] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522c20e0-c70a-39a3-828b-28852e76bebf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.029185] env[62383]: DEBUG nova.network.neutron [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.041099] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451230, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095278} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.042037] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 646.043036] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496e3761-6d23-481f-8664-348aa3776ab8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.070924] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] c2fee51e-3cc9-421c-bfe5-b324a5b14197/c2fee51e-3cc9-421c-bfe5-b324a5b14197.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 646.071696] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6dd8f3a-f983-450c-9c97-8b066f1db49d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.095449] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 646.095449] env[62383]: value = "task-2451231" [ 646.095449] env[62383]: _type = "Task" [ 646.095449] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.103864] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451231, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.159830] env[62383]: DEBUG nova.compute.utils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 646.162871] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 646.164525] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 646.180863] env[62383]: DEBUG oslo_concurrency.lockutils [req-f44365c4-d19b-4660-a74a-d69979ca685d req-3d3b8ec4-647d-4845-a5db-dddfc513c277 service nova] Releasing lock "refresh_cache-330b5e35-3292-4df7-b288-547b158e671a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.210636] env[62383]: DEBUG nova.policy [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bce611334e8242a1bcce747201fde0ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4aa6ead625714631984584d0292b4c40', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 646.507281] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Successfully created port: 4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 646.533152] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522c20e0-c70a-39a3-828b-28852e76bebf, 'name': SearchDatastore_Task, 'duration_secs': 0.017701} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.533152] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.533152] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 330b5e35-3292-4df7-b288-547b158e671a/330b5e35-3292-4df7-b288-547b158e671a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 646.533152] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91287261-cfe0-4cc8-b8d1-a1437b3667a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.539549] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Releasing lock "refresh_cache-045e5f8f-edd5-425d-bccb-054d90db27d9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.540301] env[62383]: DEBUG nova.compute.manager [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 646.540580] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 646.540994] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 646.540994] env[62383]: value = "task-2451232" [ 646.540994] env[62383]: _type = "Task" [ 646.540994] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.541919] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69dd3184-c68f-4d12-88f6-16399ebfab1b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.553204] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 646.556510] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7edc1445-84b8-49fa-82b0-b96613b9a20b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.558649] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451232, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.562648] env[62383]: DEBUG oslo_vmware.api [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 646.562648] env[62383]: value = "task-2451233" [ 646.562648] env[62383]: _type = "Task" [ 646.562648] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.572582] env[62383]: DEBUG oslo_vmware.api [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451233, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.607419] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451231, 'name': ReconfigVM_Task, 'duration_secs': 0.417325} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.607815] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Reconfigured VM instance instance-0000001e to attach disk [datastore2] c2fee51e-3cc9-421c-bfe5-b324a5b14197/c2fee51e-3cc9-421c-bfe5-b324a5b14197.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 646.608927] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6f31a70-d880-49b6-81cb-e9f56c8cb901 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.615781] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 646.615781] env[62383]: value = "task-2451234" [ 646.615781] env[62383]: _type = "Task" [ 646.615781] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.626407] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451234, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.670608] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 647.062141] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451232, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.083026] env[62383]: DEBUG oslo_vmware.api [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451233, 'name': PowerOffVM_Task, 'duration_secs': 0.147834} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.083026] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 647.083026] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 647.083026] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb5f7da5-03a7-432a-a99c-16cee900beae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.114024] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 647.114024] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 647.114024] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Deleting the datastore file [datastore2] 045e5f8f-edd5-425d-bccb-054d90db27d9 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 647.114024] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40d89898-9442-4350-943d-47a93e83688d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.126577] env[62383]: DEBUG oslo_vmware.api [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for the task: (returnval){ [ 647.126577] env[62383]: value = "task-2451236" [ 647.126577] env[62383]: _type = "Task" [ 647.126577] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.135585] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451234, 'name': Rename_Task, 'duration_secs': 0.190442} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.135585] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 647.135936] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f72775d2-49aa-490c-a057-07e36e58ebad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.141796] env[62383]: DEBUG oslo_vmware.api [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451236, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.153010] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 647.153010] env[62383]: value = "task-2451237" [ 647.153010] env[62383]: _type = "Task" [ 647.153010] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.162214] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451237, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.394176] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab87beb0-ada7-4eb8-96f9-ad58ca0aa22a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.401597] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de29ba5e-0d32-4e52-85cd-439773867553 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.431880] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c58cb6-d6f7-44c2-80f2-d92696b1f686 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.439573] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfe2ec8-ba50-477c-b64b-788bda3fa624 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.453129] env[62383]: DEBUG nova.compute.provider_tree [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.555810] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451232, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.685092} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.556184] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 330b5e35-3292-4df7-b288-547b158e671a/330b5e35-3292-4df7-b288-547b158e671a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 647.556908] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 647.556908] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed3fd713-8598-4fda-bbc1-6688af2ab928 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.563772] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 647.563772] env[62383]: value = "task-2451238" [ 647.563772] env[62383]: _type = "Task" [ 647.563772] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.573889] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451238, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.638162] env[62383]: DEBUG oslo_vmware.api [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Task: {'id': task-2451236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211811} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.638162] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 647.638162] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 647.638359] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 647.640056] env[62383]: INFO nova.compute.manager [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Took 1.10 seconds to destroy the instance on the hypervisor. [ 647.640056] env[62383]: DEBUG oslo.service.loopingcall [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 647.640056] env[62383]: DEBUG nova.compute.manager [-] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 647.640056] env[62383]: DEBUG nova.network.neutron [-] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.660216] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451237, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.663042] env[62383]: DEBUG nova.network.neutron [-] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.682362] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 647.705206] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 647.705455] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.705609] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 647.705786] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.705930] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 647.706089] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 647.706304] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc 
tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 647.706467] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 647.706667] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 647.706833] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 647.707014] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 647.708216] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa42b82e-dc5c-4092-a173-8126bad1dfdb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.716320] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69371644-f60d-4958-9d7b-0f996d5338c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.959991] env[62383]: DEBUG nova.scheduler.client.report [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 648.071896] env[62383]: DEBUG nova.compute.manager [req-3751b29e-56f3-4dec-a487-acef9a438ddb req-87963800-ccc4-4c81-8c7b-1cf118f7b854 service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Received event network-vif-plugged-4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 648.072222] env[62383]: DEBUG oslo_concurrency.lockutils [req-3751b29e-56f3-4dec-a487-acef9a438ddb req-87963800-ccc4-4c81-8c7b-1cf118f7b854 service nova] Acquiring lock 
"6b5daa17-ad4a-4b30-a1fe-083a1a238667-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.072595] env[62383]: DEBUG oslo_concurrency.lockutils [req-3751b29e-56f3-4dec-a487-acef9a438ddb req-87963800-ccc4-4c81-8c7b-1cf118f7b854 service nova] Lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.073845] env[62383]: DEBUG oslo_concurrency.lockutils [req-3751b29e-56f3-4dec-a487-acef9a438ddb req-87963800-ccc4-4c81-8c7b-1cf118f7b854 service nova] Lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.073845] env[62383]: DEBUG nova.compute.manager [req-3751b29e-56f3-4dec-a487-acef9a438ddb req-87963800-ccc4-4c81-8c7b-1cf118f7b854 service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] No waiting events found dispatching network-vif-plugged-4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 648.073845] env[62383]: WARNING nova.compute.manager [req-3751b29e-56f3-4dec-a487-acef9a438ddb req-87963800-ccc4-4c81-8c7b-1cf118f7b854 service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Received unexpected event network-vif-plugged-4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8 for instance with vm_state building and task_state spawning. [ 648.080628] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451238, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073512} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.080941] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 648.082126] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3810215-dc85-4619-ac7d-2584b6f1ca9d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.109290] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 330b5e35-3292-4df7-b288-547b158e671a/330b5e35-3292-4df7-b288-547b158e671a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 648.109881] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4319426-fb25-43e9-ad6f-428ac12581ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.130855] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 648.130855] env[62383]: value = "task-2451239" [ 648.130855] env[62383]: _type = "Task" [ 648.130855] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.139514] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451239, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.160602] env[62383]: DEBUG oslo_vmware.api [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451237, 'name': PowerOnVM_Task, 'duration_secs': 0.574617} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.160898] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 648.161123] env[62383]: INFO nova.compute.manager [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Took 9.02 seconds to spawn the instance on the hypervisor. 
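
The vCenter task entries above (ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task) all follow the same shape: a task handle is returned, then _poll_task logs "progress is N%" until the task reports "completed successfully" with a duration. Below is a minimal Python sketch of that polling pattern; it is only an illustration, not the oslo.vmware wait_for_task implementation, and fetch_task is a hypothetical callable standing in for the vCenter task query.

import time

def wait_for_task(fetch_task, interval=0.5, timeout=300.0):
    """Poll fetch_task() until it reports success or error.

    fetch_task() is assumed (for this sketch) to return a dict such as
    {'state': 'running', 'progress': 33}, {'state': 'success'} or
    {'state': 'error', 'message': '...'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task()
        state = info.get('state')
        if state == 'success':
            return info
        if state == 'error':
            raise RuntimeError(info.get('message', 'task failed'))
        # Still running: report progress and retry after a short sleep,
        # mirroring the repeated "progress is N%" debug lines above.
        print("task progress is %s%%" % info.get('progress', 0))
        time.sleep(interval)
    raise TimeoutError('task did not complete within %.0fs' % timeout)

if __name__ == '__main__':
    # Toy task that "completes" after three polls.
    states = iter([{'state': 'running', 'progress': 5},
                   {'state': 'running', 'progress': 33},
                   {'state': 'success'}])
    wait_for_task(lambda: next(states), interval=0.01)
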
[ 648.161321] env[62383]: DEBUG nova.compute.manager [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 648.162359] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f209bd2-93a4-455c-aa16-19c56ae2d8e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.165199] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Successfully updated port: 4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 648.166308] env[62383]: DEBUG nova.network.neutron [-] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.467684] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.818s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.468343] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 648.471131] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.772s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.472566] env[62383]: INFO nova.compute.claims [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 648.643489] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451239, 'name': ReconfigVM_Task, 'duration_secs': 0.312492} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.643811] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 330b5e35-3292-4df7-b288-547b158e671a/330b5e35-3292-4df7-b288-547b158e671a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 648.644529] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-743a6584-7208-4fa1-bd75-4cd8922a1d5f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.652256] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 648.652256] env[62383]: value = "task-2451240" [ 648.652256] env[62383]: _type = "Task" [ 648.652256] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.661722] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451240, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.668396] env[62383]: INFO nova.compute.manager [-] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Took 1.03 seconds to deallocate network for instance. [ 648.669073] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "refresh_cache-6b5daa17-ad4a-4b30-a1fe-083a1a238667" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.669073] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired lock "refresh_cache-6b5daa17-ad4a-4b30-a1fe-083a1a238667" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.669073] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.682642] env[62383]: INFO nova.compute.manager [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Took 37.99 seconds to build instance. 
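
The lockutils lines above record, for each named lock, how long the caller waited before acquiring it and how long it was held (for example "compute_resources" held 2.818s, the per-instance build lock held 69.838s). The sketch below shows that bookkeeping pattern with a plain threading.Lock; the lock registry and the timed_lock helper are illustrative names for this sketch, not the oslo.concurrency lockutils code itself.

import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

def _get_lock(name):
    # One shared lock object per lock name, created lazily.
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())

@contextmanager
def timed_lock(name, owner):
    lock = _get_lock(name)
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print('Lock "%s" released by "%s" :: held %.3fs' % (name, owner, held))

if __name__ == '__main__':
    with timed_lock('compute_resources', 'example.instance_claim'):
        time.sleep(0.05)  # stand-in for the critical section
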
[ 648.979153] env[62383]: DEBUG nova.compute.utils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 648.982560] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 648.982704] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 649.029209] env[62383]: DEBUG nova.policy [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bce611334e8242a1bcce747201fde0ee', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4aa6ead625714631984584d0292b4c40', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 649.163651] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451240, 'name': Rename_Task, 'duration_secs': 0.149018} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.163930] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 649.164204] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-613f0cd3-a18a-44c8-ad83-f76aedc0d7e3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.170801] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 649.170801] env[62383]: value = "task-2451241" [ 649.170801] env[62383]: _type = "Task" [ 649.170801] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.181403] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.191942] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d625acf7-a3f7-4225-b538-0157ae11995e tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.838s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.191942] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451241, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.263910] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.286429] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Successfully created port: d6741482-4f9c-47b3-83b3-2c1ed7a7fbad {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 649.488856] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 649.530022] env[62383]: INFO nova.compute.manager [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Rescuing [ 649.530022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "refresh_cache-c2fee51e-3cc9-421c-bfe5-b324a5b14197" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.530022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "refresh_cache-c2fee51e-3cc9-421c-bfe5-b324a5b14197" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.530022] env[62383]: DEBUG nova.network.neutron [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.586222] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52219b84-331a-93fb-1981-d5c4bbea0452/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 649.586222] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1279ff7-4b5b-49ae-a831-2ffdd6a1007f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.595288] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52219b84-331a-93fb-1981-d5c4bbea0452/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 649.595288] env[62383]: ERROR oslo_vmware.rw_handles [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52219b84-331a-93fb-1981-d5c4bbea0452/disk-0.vmdk due to incomplete transfer. 
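
The rw_handles entries above show the tail end of an image export: the NFC lease for the VMDK URL is still in state "ready", but the transfer is judged incomplete, so the lease is aborted rather than completed and the read handle is closed. The sketch below illustrates only that decision; the lease object and finish_transfer helper are hypothetical stand-ins for this sketch, not the oslo.vmware rw_handles API.

class FakeLease:
    """Toy lease with the two terminal operations the log refers to."""

    def __init__(self):
        self.state = 'ready'

    def complete(self):
        self.state = 'done'
        print('Lease completed.')

    def abort(self, reason):
        self.state = 'aborted'
        print('Aborting lease due to %s.' % reason)

def finish_transfer(lease, bytes_written, expected_size):
    # Mirror the log: a transfer that did not reach the expected size is
    # treated as incomplete and the lease is aborted instead of completed.
    if bytes_written < expected_size:
        lease.abort('incomplete transfer')
    else:
        lease.complete()

if __name__ == '__main__':
    finish_transfer(FakeLease(), bytes_written=1024, expected_size=21318656)
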
[ 649.597944] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-97c9f393-f478-4eb7-8e5a-11d4c157ef3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.608619] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52219b84-331a-93fb-1981-d5c4bbea0452/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 649.609157] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Uploaded image 83dcd21b-ba65-413d-a25a-06168574d922 to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 649.612247] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 649.615137] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Updating instance_info_cache with network_info: [{"id": "4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8", "address": "fa:16:3e:93:67:a6", "network": {"id": "6d51bcbf-affc-49e9-92e2-e358b0aef6f9", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-252398505-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4aa6ead625714631984584d0292b4c40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e965d9c-3c", "ovs_interfaceid": "4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.615137] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4b951f75-d307-40ef-900c-57e2b1b92b0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.626022] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ 
[ 649.626022] env[62383]: value = "task-2451242" [ 649.626022] env[62383]: _type = "Task" [ 649.626022] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.639175] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451242, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.681455] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451241, 'name': PowerOnVM_Task, 'duration_secs': 0.510193} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.681739] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 649.682382] env[62383]: INFO nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Took 7.94 seconds to spawn the instance on the hypervisor. [ 649.682382] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 649.683048] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9020384a-e0e7-4f12-9a06-4cdb49220adf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.695115] env[62383]: DEBUG nova.compute.manager [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 650.119258] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Releasing lock "refresh_cache-6b5daa17-ad4a-4b30-a1fe-083a1a238667" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.119675] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Instance network_info: |[{"id": "4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8", "address": "fa:16:3e:93:67:a6", "network": {"id": "6d51bcbf-affc-49e9-92e2-e358b0aef6f9", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-252398505-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4aa6ead625714631984584d0292b4c40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e965d9c-3c", "ovs_interfaceid": "4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 650.120119] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:67:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359850cc-b061-4c9c-a61c-eb42e0f7c359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 650.130173] env[62383]: DEBUG oslo.service.loopingcall [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.134219] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 650.138833] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-897b375e-8e92-4b01-b8bf-1ed8f0a1157f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.164690] env[62383]: DEBUG nova.compute.manager [req-b012cd9f-8f4a-425b-aeb7-b9dcdf236123 req-4073abe5-82dd-42a8-9210-463030f6ab4d service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Received event network-changed-4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 650.164879] env[62383]: DEBUG nova.compute.manager [req-b012cd9f-8f4a-425b-aeb7-b9dcdf236123 req-4073abe5-82dd-42a8-9210-463030f6ab4d service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Refreshing instance network info cache due to event network-changed-4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 650.165107] env[62383]: DEBUG oslo_concurrency.lockutils [req-b012cd9f-8f4a-425b-aeb7-b9dcdf236123 req-4073abe5-82dd-42a8-9210-463030f6ab4d service nova] Acquiring lock "refresh_cache-6b5daa17-ad4a-4b30-a1fe-083a1a238667" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.165247] env[62383]: DEBUG oslo_concurrency.lockutils [req-b012cd9f-8f4a-425b-aeb7-b9dcdf236123 req-4073abe5-82dd-42a8-9210-463030f6ab4d service nova] Acquired lock "refresh_cache-6b5daa17-ad4a-4b30-a1fe-083a1a238667" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.165401] env[62383]: DEBUG nova.network.neutron [req-b012cd9f-8f4a-425b-aeb7-b9dcdf236123 req-4073abe5-82dd-42a8-9210-463030f6ab4d service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Refreshing network info cache for port 4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 650.172291] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451242, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.172510] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 650.172510] env[62383]: value = "task-2451243" [ 650.172510] env[62383]: _type = "Task" [ 650.172510] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.179579] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acd4ada-fd9f-4353-86db-d71d52ae1568 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.189625] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451243, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.202655] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e7d018-7c29-43d3-8553-439573aee3d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.247482] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.251718] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828f6224-c54f-4cd2-9528-32ccdbf298e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.261325] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001ead76-11b3-4b0a-bf82-afaeafa09fbf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.270022] env[62383]: INFO nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Took 39.08 seconds to build instance. [ 650.282033] env[62383]: DEBUG nova.compute.provider_tree [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.484532] env[62383]: DEBUG nova.network.neutron [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Updating instance_info_cache with network_info: [{"id": "e04396ac-7097-4a6c-8e34-1a92f30eb36a", "address": "fa:16:3e:e9:34:d6", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape04396ac-70", "ovs_interfaceid": "e04396ac-7097-4a6c-8e34-1a92f30eb36a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 650.503701] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 650.534650] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 650.534650] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.534650] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 650.534794] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.534794] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 650.534794] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 650.534794] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 650.534794] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 650.534956] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 650.534989] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 650.538580] env[62383]: DEBUG nova.virt.hardware [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 650.538580] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a12427-d516-4b78-b5f1-ca9089cf5150 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.544817] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5a6fe6-48f0-4011-b2c1-4c1a76c0361e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.643461] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451242, 'name': Destroy_Task, 'duration_secs': 0.857095} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.643735] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Destroyed the VM [ 650.643998] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 650.644284] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d07ac005-541f-41a7-8be7-a5c88d5079e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.651324] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 650.651324] env[62383]: value = "task-2451244" [ 650.651324] env[62383]: _type = "Task" [ 650.651324] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.662168] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451244, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.683377] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451243, 'name': CreateVM_Task, 'duration_secs': 0.475663} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.683377] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 650.683669] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.683779] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.684108] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 650.684355] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4df32e1-5c77-4d22-9098-f8f31377be75 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.692527] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 650.692527] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52629104-a50c-9894-0a95-19ade24ee97c" [ 650.692527] env[62383]: _type = "Task" [ 650.692527] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.702942] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52629104-a50c-9894-0a95-19ade24ee97c, 'name': SearchDatastore_Task, 'duration_secs': 0.011082} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.703246] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.703474] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 650.704103] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.704925] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.704925] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 650.704925] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d7d3d85-3da2-43f5-b7b1-2e0ce03b20c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.713528] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 650.714799] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 650.715714] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d459e621-4729-4173-8578-67f637e45d52 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.723684] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 650.723684] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5299bc2a-9214-df26-c411-e1c2450bf702" [ 650.723684] env[62383]: _type = "Task" [ 650.723684] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.736294] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5299bc2a-9214-df26-c411-e1c2450bf702, 'name': SearchDatastore_Task, 'duration_secs': 0.008317} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.737290] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3764b2fa-8774-4caa-9b91-47e84e56a755 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.742670] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 650.742670] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52087998-bd50-2dfe-f5e4-56e1fba220dd" [ 650.742670] env[62383]: _type = "Task" [ 650.742670] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.750344] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52087998-bd50-2dfe-f5e4-56e1fba220dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.771545] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "330b5e35-3292-4df7-b288-547b158e671a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.860s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.787776] env[62383]: DEBUG nova.scheduler.client.report [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 650.890523] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Successfully updated port: d6741482-4f9c-47b3-83b3-2c1ed7a7fbad {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 650.987910] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "refresh_cache-c2fee51e-3cc9-421c-bfe5-b324a5b14197" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.163948] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451244, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.253809] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52087998-bd50-2dfe-f5e4-56e1fba220dd, 'name': SearchDatastore_Task, 'duration_secs': 0.00802} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.253809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.254045] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 6b5daa17-ad4a-4b30-a1fe-083a1a238667/6b5daa17-ad4a-4b30-a1fe-083a1a238667.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 651.254233] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81218a28-0041-40f9-abfa-bc8434517313 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.264868] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 651.264868] env[62383]: value = "task-2451245" [ 651.264868] env[62383]: _type = "Task" [ 651.264868] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.279474] env[62383]: DEBUG nova.compute.manager [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 651.282959] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451245, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.296278] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.825s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.296800] env[62383]: DEBUG nova.compute.manager [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 651.300992] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 29.286s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.301215] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.301388] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 651.301718] env[62383]: DEBUG oslo_concurrency.lockutils [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.770s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.301948] env[62383]: DEBUG nova.objects.instance [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lazy-loading 'resources' on Instance uuid 13db2c17-ccba-4336-929a-0d01202c5143 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 651.308671] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027b5d28-79d5-4b59-9b25-e8a981727b8c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.318188] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce67aaf7-116e-4d0e-8c66-92d7339a60a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.334181] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b7b6c6-00a8-4632-80b1-503732023699 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.341374] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55494386-0e1a-4dc3-9445-9ce846860446 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.374047] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178877MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 651.374216] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.398660] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "refresh_cache-872ac212-9f29-426d-94c7-e1bf73aebd94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.398660] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired lock "refresh_cache-872ac212-9f29-426d-94c7-e1bf73aebd94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.398660] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 651.431923] env[62383]: DEBUG nova.network.neutron [req-b012cd9f-8f4a-425b-aeb7-b9dcdf236123 req-4073abe5-82dd-42a8-9210-463030f6ab4d service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Updated VIF entry in instance network info cache for port 4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 651.432129] env[62383]: DEBUG nova.network.neutron [req-b012cd9f-8f4a-425b-aeb7-b9dcdf236123 req-4073abe5-82dd-42a8-9210-463030f6ab4d service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Updating instance_info_cache with network_info: [{"id": "4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8", "address": "fa:16:3e:93:67:a6", "network": {"id": "6d51bcbf-affc-49e9-92e2-e358b0aef6f9", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-252398505-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4aa6ead625714631984584d0292b4c40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e965d9c-3c", "ovs_interfaceid": "4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.666609] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451244, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.775085] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451245, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.802542] env[62383]: DEBUG nova.compute.utils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 651.803844] env[62383]: DEBUG nova.compute.manager [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 651.804053] env[62383]: DEBUG nova.network.neutron [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 651.812144] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.882022] env[62383]: DEBUG nova.policy [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '313394d2b7a24a20b12b72f37fd07c1f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ba1830673394f88a05df80fdba83729', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 651.937436] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.939883] env[62383]: DEBUG oslo_concurrency.lockutils [req-b012cd9f-8f4a-425b-aeb7-b9dcdf236123 req-4073abe5-82dd-42a8-9210-463030f6ab4d service nova] Releasing lock "refresh_cache-6b5daa17-ad4a-4b30-a1fe-083a1a238667" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.110079] env[62383]: DEBUG nova.network.neutron [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Updating instance_info_cache with network_info: [{"id": "d6741482-4f9c-47b3-83b3-2c1ed7a7fbad", "address": "fa:16:3e:42:f6:9c", "network": {"id": "6d51bcbf-affc-49e9-92e2-e358b0aef6f9", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-252398505-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4aa6ead625714631984584d0292b4c40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6741482-4f", "ovs_interfaceid": "d6741482-4f9c-47b3-83b3-2c1ed7a7fbad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.169840] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451244, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.278290] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451245, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.312086] env[62383]: DEBUG nova.compute.manager [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 652.321790] env[62383]: DEBUG nova.compute.manager [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Received event network-vif-plugged-d6741482-4f9c-47b3-83b3-2c1ed7a7fbad {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 652.321790] env[62383]: DEBUG oslo_concurrency.lockutils [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] Acquiring lock "872ac212-9f29-426d-94c7-e1bf73aebd94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.322018] env[62383]: DEBUG oslo_concurrency.lockutils [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] Lock "872ac212-9f29-426d-94c7-e1bf73aebd94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.322134] env[62383]: DEBUG oslo_concurrency.lockutils [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] Lock "872ac212-9f29-426d-94c7-e1bf73aebd94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.322301] env[62383]: DEBUG nova.compute.manager [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] No waiting events found dispatching network-vif-plugged-d6741482-4f9c-47b3-83b3-2c1ed7a7fbad {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 652.324861] env[62383]: WARNING nova.compute.manager [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Received unexpected event network-vif-plugged-d6741482-4f9c-47b3-83b3-2c1ed7a7fbad for instance with vm_state building and task_state spawning. [ 652.325134] env[62383]: DEBUG nova.compute.manager [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Received event network-changed-d6741482-4f9c-47b3-83b3-2c1ed7a7fbad {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 652.325384] env[62383]: DEBUG nova.compute.manager [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Refreshing instance network info cache due to event network-changed-d6741482-4f9c-47b3-83b3-2c1ed7a7fbad. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 652.325527] env[62383]: DEBUG oslo_concurrency.lockutils [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] Acquiring lock "refresh_cache-872ac212-9f29-426d-94c7-e1bf73aebd94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.386918] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab25a5b-3db6-48f6-a169-7912000d4884 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.398355] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60a73d5-87a5-48ad-93fd-3d4c7ee15ff7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.435597] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789d164a-7594-4830-a772-c10bfdbd29d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.448022] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090a3bd3-33ce-489f-a664-0c734adf072d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.465310] env[62383]: DEBUG nova.compute.provider_tree [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.524115] env[62383]: DEBUG nova.network.neutron [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Successfully created port: d251f129-de40-462a-86b9-50939d1a57c2 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 652.539431] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 652.539877] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-340be0c5-0417-4d87-97ec-22f63e83b806 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.548541] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 652.548541] env[62383]: value = "task-2451246" [ 652.548541] env[62383]: _type = "Task" [ 652.548541] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.562682] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.614123] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Releasing lock "refresh_cache-872ac212-9f29-426d-94c7-e1bf73aebd94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.614442] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Instance network_info: |[{"id": "d6741482-4f9c-47b3-83b3-2c1ed7a7fbad", "address": "fa:16:3e:42:f6:9c", "network": {"id": "6d51bcbf-affc-49e9-92e2-e358b0aef6f9", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-252398505-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4aa6ead625714631984584d0292b4c40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6741482-4f", "ovs_interfaceid": "d6741482-4f9c-47b3-83b3-2c1ed7a7fbad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 652.614755] env[62383]: DEBUG oslo_concurrency.lockutils [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] Acquired lock "refresh_cache-872ac212-9f29-426d-94c7-e1bf73aebd94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.614939] env[62383]: DEBUG nova.network.neutron [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Refreshing network info cache for port d6741482-4f9c-47b3-83b3-2c1ed7a7fbad {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 652.616876] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:f6:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '359850cc-b061-4c9c-a61c-eb42e0f7c359', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6741482-4f9c-47b3-83b3-2c1ed7a7fbad', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.626776] env[62383]: DEBUG oslo.service.loopingcall [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.628067] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 652.628300] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d4700e6-1ef2-4109-a7d8-80fbef2f4e73 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.653558] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.653558] env[62383]: value = "task-2451247" [ 652.653558] env[62383]: _type = "Task" [ 652.653558] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.672247] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451247, 'name': CreateVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.676882] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451244, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.777581] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451245, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.339288} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.777888] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 6b5daa17-ad4a-4b30-a1fe-083a1a238667/6b5daa17-ad4a-4b30-a1fe-083a1a238667.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 652.778111] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 652.780169] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1df0393b-ca6f-4307-9f43-236e0b03aac7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.786163] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 652.786163] env[62383]: value = "task-2451248" [ 652.786163] env[62383]: _type = "Task" [ 652.786163] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.792461] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451248, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.968572] env[62383]: DEBUG nova.scheduler.client.report [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 653.064223] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451246, 'name': PowerOffVM_Task, 'duration_secs': 0.200719} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.064223] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 653.064223] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b677e8-6b7b-4ac4-a510-c74ef7f4a154 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.080165] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc2c962-77e8-448d-98fc-16162cd413b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.090433] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "563840a8-8fa7-4bfa-9912-933c14e7076a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.091024] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "563840a8-8fa7-4bfa-9912-933c14e7076a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.120760] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 653.121083] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-887fe594-bae6-45a2-a4dd-134bc45b889a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.130290] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 653.130290] env[62383]: value = "task-2451249" [ 653.130290] env[62383]: _type = "Task" [ 653.130290] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.138679] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 653.138925] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.139262] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.139350] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.139515] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.139755] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99cb7a93-fc85-4031-ae0b-5335f887d892 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.147186] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.147708] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 653.148079] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8406c1f-b8ec-4036-a99d-8f254e2d49a7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.153112] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 653.153112] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52496b36-8521-4f2f-90ee-ae98ebbbc40a" [ 653.153112] env[62383]: _type = "Task" [ 653.153112] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.169111] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52496b36-8521-4f2f-90ee-ae98ebbbc40a, 'name': SearchDatastore_Task, 'duration_secs': 0.008412} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.172508] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451247, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.173022] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65defc6c-b19b-4d54-8b48-03aa0abf0664 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.179421] env[62383]: DEBUG oslo_vmware.api [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451244, 'name': RemoveSnapshot_Task, 'duration_secs': 2.08052} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.180103] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 653.180387] env[62383]: INFO nova.compute.manager [None req-6bb83045-41e2-43cc-a103-4544830c066f tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Took 16.20 seconds to snapshot the instance on the hypervisor. [ 653.184526] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 653.184526] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5224dbda-8c99-87e9-628d-920e180cdf01" [ 653.184526] env[62383]: _type = "Task" [ 653.184526] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.192159] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5224dbda-8c99-87e9-628d-920e180cdf01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.293392] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451248, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.322043] env[62383]: DEBUG nova.compute.manager [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 653.346979] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 653.347244] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 653.347403] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 653.347584] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 653.347730] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Image pref 0:0:0 {{(pid=62383) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 653.347876] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 653.348089] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 653.348252] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 653.348415] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 653.348601] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 653.348778] env[62383]: DEBUG nova.virt.hardware [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 653.349665] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9771d5f0-10ed-4f43-be50-7a3d68b5f41e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.358250] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ed0d534-27ba-4f7a-b88f-16e17837a1f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.362846] env[62383]: DEBUG nova.network.neutron [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Updated VIF entry in instance network info cache for port d6741482-4f9c-47b3-83b3-2c1ed7a7fbad. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 653.363212] env[62383]: DEBUG nova.network.neutron [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Updating instance_info_cache with network_info: [{"id": "d6741482-4f9c-47b3-83b3-2c1ed7a7fbad", "address": "fa:16:3e:42:f6:9c", "network": {"id": "6d51bcbf-affc-49e9-92e2-e358b0aef6f9", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-252398505-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4aa6ead625714631984584d0292b4c40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "359850cc-b061-4c9c-a61c-eb42e0f7c359", "external-id": "nsx-vlan-transportzone-113", "segmentation_id": 113, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6741482-4f", "ovs_interfaceid": "d6741482-4f9c-47b3-83b3-2c1ed7a7fbad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.473704] env[62383]: DEBUG oslo_concurrency.lockutils [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.172s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.476056] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.732s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.476289] env[62383]: DEBUG nova.objects.instance [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lazy-loading 'resources' on Instance uuid 0d992155-24fa-4836-83c9-8f188f7d7efa {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 653.494289] env[62383]: INFO nova.scheduler.client.report [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted allocations for instance 13db2c17-ccba-4336-929a-0d01202c5143 [ 653.670021] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451247, 'name': CreateVM_Task, 'duration_secs': 0.834383} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.670021] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 653.670021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.670021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.670021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 653.670295] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ff2723e-dc16-4826-8428-ec4afd485dc0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.673613] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 653.673613] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]523a5048-9b2c-60f3-162b-aeb57856b879" [ 653.673613] env[62383]: _type = "Task" [ 653.673613] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.681139] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523a5048-9b2c-60f3-162b-aeb57856b879, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.695484] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5224dbda-8c99-87e9-628d-920e180cdf01, 'name': SearchDatastore_Task, 'duration_secs': 0.011133} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.695860] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.696225] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] c2fee51e-3cc9-421c-bfe5-b324a5b14197/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. {{(pid=62383) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 653.696851] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e52ed999-5991-44ba-bc84-27b2f0b81d97 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.704256] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 653.704256] env[62383]: value = "task-2451250" [ 653.704256] env[62383]: _type = "Task" [ 653.704256] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.712600] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451250, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.793913] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451248, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.926567} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.794197] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 653.794994] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7f952c-136a-411d-a127-60fec7d6a158 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.817270] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 6b5daa17-ad4a-4b30-a1fe-083a1a238667/6b5daa17-ad4a-4b30-a1fe-083a1a238667.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 653.817552] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0e5a888-e560-431e-9b7c-cdc863530a78 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.836910] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 653.836910] env[62383]: value = "task-2451251" [ 653.836910] env[62383]: _type = "Task" [ 653.836910] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.844979] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451251, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.875565] env[62383]: DEBUG oslo_concurrency.lockutils [req-911ce5be-fc2c-48c8-8de1-7cdde6c8d757 req-b4f01d7f-6216-4585-841d-cadc2e1d6da4 service nova] Releasing lock "refresh_cache-872ac212-9f29-426d-94c7-e1bf73aebd94" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.004217] env[62383]: DEBUG oslo_concurrency.lockutils [None req-856d6cf0-1cb6-487c-a1dc-deffff04f67e tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "13db2c17-ccba-4336-929a-0d01202c5143" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.967s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.187326] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523a5048-9b2c-60f3-162b-aeb57856b879, 'name': SearchDatastore_Task, 'duration_secs': 0.009745} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.187662] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.187892] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.188158] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.188308] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.188504] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.188774] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a44ee8c9-4b22-4c81-ac39-7d599b972585 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.198076] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.198314] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 654.199133] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3def5b2-e509-49db-b891-4521e1d760a2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.210051] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 654.210051] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b95f02-0c32-9b4d-f240-8b8ab6d6ff96" [ 654.210051] env[62383]: _type = "Task" [ 654.210051] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.219321] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451250, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485291} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.219967] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] c2fee51e-3cc9-421c-bfe5-b324a5b14197/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. [ 654.220823] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d0f4a9-01b1-4a28-97a2-b4d4466cd726 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.227285] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b95f02-0c32-9b4d-f240-8b8ab6d6ff96, 'name': SearchDatastore_Task, 'duration_secs': 0.007852} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.228485] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9543f756-b710-4b92-b2e2-7771eb0b7ad7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.255181] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] c2fee51e-3cc9-421c-bfe5-b324a5b14197/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 654.257232] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56c8b9f6-9f78-4281-a86a-85a94597198b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.271562] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 654.271562] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52cd0326-1c1c-3a29-f874-3eed0c05fcdd" [ 654.271562] env[62383]: _type = "Task" [ 654.271562] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.276324] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 654.276324] env[62383]: value = "task-2451252" [ 654.276324] env[62383]: _type = "Task" [ 654.276324] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.279848] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52cd0326-1c1c-3a29-f874-3eed0c05fcdd, 'name': SearchDatastore_Task, 'duration_secs': 0.008684} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.283057] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.283333] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 872ac212-9f29-426d-94c7-e1bf73aebd94/872ac212-9f29-426d-94c7-e1bf73aebd94.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 654.283587] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62f0bc65-3ebc-4767-8406-94beed2afd98 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.293035] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451252, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.295235] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 654.295235] env[62383]: value = "task-2451253" [ 654.295235] env[62383]: _type = "Task" [ 654.295235] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.304171] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451253, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.350044] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451251, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.512654] env[62383]: DEBUG nova.compute.manager [req-846527d2-8fcd-4f75-a938-64b16b67794d req-4b7a49e7-6a3c-4b6a-8cb9-43f9630775fd service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Received event network-vif-plugged-d251f129-de40-462a-86b9-50939d1a57c2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 654.513118] env[62383]: DEBUG oslo_concurrency.lockutils [req-846527d2-8fcd-4f75-a938-64b16b67794d req-4b7a49e7-6a3c-4b6a-8cb9-43f9630775fd service nova] Acquiring lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.513118] env[62383]: DEBUG oslo_concurrency.lockutils [req-846527d2-8fcd-4f75-a938-64b16b67794d req-4b7a49e7-6a3c-4b6a-8cb9-43f9630775fd service nova] Lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.513275] env[62383]: DEBUG oslo_concurrency.lockutils [req-846527d2-8fcd-4f75-a938-64b16b67794d req-4b7a49e7-6a3c-4b6a-8cb9-43f9630775fd service nova] Lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.513436] env[62383]: DEBUG nova.compute.manager [req-846527d2-8fcd-4f75-a938-64b16b67794d req-4b7a49e7-6a3c-4b6a-8cb9-43f9630775fd service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] No waiting events found dispatching network-vif-plugged-d251f129-de40-462a-86b9-50939d1a57c2 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 654.513650] env[62383]: WARNING nova.compute.manager [req-846527d2-8fcd-4f75-a938-64b16b67794d req-4b7a49e7-6a3c-4b6a-8cb9-43f9630775fd service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Received unexpected event network-vif-plugged-d251f129-de40-462a-86b9-50939d1a57c2 for instance with vm_state building and task_state spawning. 
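The oslo_concurrency entries above ("Acquiring lock ... acquired ... released ... held Ns") all follow the same lock lifecycle around the instance-events bookkeeping. The following is a minimal, hypothetical sketch of that pattern, not Nova's actual code: the lock name and the two functions are made up for illustration; only lockutils.synchronized and lockutils.lock are real oslo.concurrency APIs.

    # Hypothetical illustration of the acquire/release pattern logged above.
    from oslo_concurrency import lockutils


    @lockutils.synchronized('example-instance-events')
    def pop_instance_event(instance_uuid, event_name):
        # Placeholder body: real code would look up the waiter registered
        # for (instance_uuid, event_name) while holding the lock.
        return None


    def clear_events_for_instance(instance_uuid, events):
        # Same pattern with the context-manager form; the DEBUG lines in the
        # log are emitted as this lock is acquired and then released.
        with lockutils.lock('example-instance-events'):
            events.pop(instance_uuid, None)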
[ 654.516141] env[62383]: DEBUG nova.network.neutron [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Successfully updated port: d251f129-de40-462a-86b9-50939d1a57c2 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 654.524247] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquiring lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.524363] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.524605] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquiring lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.524817] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.525025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.529444] env[62383]: INFO nova.compute.manager [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Terminating instance [ 654.611983] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae804a97-9b40-482c-9ff8-c24b7ebde3c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.619968] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55b1f06-ee32-4b0f-beb6-c50f3de41d88 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.653016] env[62383]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683856ac-5471-4476-b665-82199cad0b45 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.661489] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbff988-9de5-4d80-a0e7-2af4dce31105 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.675767] env[62383]: DEBUG nova.compute.provider_tree [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.790492] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451252, 'name': ReconfigVM_Task, 'duration_secs': 0.501131} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.790837] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Reconfigured VM instance instance-0000001e to attach disk [datastore2] c2fee51e-3cc9-421c-bfe5-b324a5b14197/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.791746] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ab8e55-48c0-4311-a441-724fd7c572b9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.825711] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76161095-db02-47d3-b2b4-c4e3bba25b87 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.835892] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451253, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.842190] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 654.842190] env[62383]: value = "task-2451254" [ 654.842190] env[62383]: _type = "Task" [ 654.842190] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.845452] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451251, 'name': ReconfigVM_Task, 'duration_secs': 0.559531} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.848278] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 6b5daa17-ad4a-4b30-a1fe-083a1a238667/6b5daa17-ad4a-4b30-a1fe-083a1a238667.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.848884] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef0d156c-7e42-44a7-8ae8-a2067f2bbab1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.854607] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451254, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.855705] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 654.855705] env[62383]: value = "task-2451255" [ 654.855705] env[62383]: _type = "Task" [ 654.855705] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.863831] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451255, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.021396] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "refresh_cache-9c2c55a9-5b24-4d52-8d6b-666609349a3a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.021572] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquired lock "refresh_cache-9c2c55a9-5b24-4d52-8d6b-666609349a3a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.021733] env[62383]: DEBUG nova.network.neutron [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 655.036091] env[62383]: DEBUG nova.compute.manager [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 655.036329] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 655.037191] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6440be35-6cff-40f7-b2e2-09a0ff7c59af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.047893] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 655.048184] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-694460d3-b476-4599-8968-0ea68980429c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.055262] env[62383]: DEBUG oslo_vmware.api [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 655.055262] env[62383]: value = "task-2451256" [ 655.055262] env[62383]: _type = "Task" [ 655.055262] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.063599] env[62383]: DEBUG oslo_vmware.api [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451256, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.179467] env[62383]: DEBUG nova.scheduler.client.report [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 655.305527] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451253, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540794} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.305777] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 872ac212-9f29-426d-94c7-e1bf73aebd94/872ac212-9f29-426d-94c7-e1bf73aebd94.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 655.305987] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 655.306640] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e4ff4e8-3f56-4a9e-8ac0-799ee37e40c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.312312] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 655.312312] env[62383]: value = "task-2451257" [ 655.312312] env[62383]: _type = "Task" [ 655.312312] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.319960] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451257, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.356773] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451254, 'name': ReconfigVM_Task, 'duration_secs': 0.244367} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.359904] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 655.360171] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-754d0981-f4d2-4d70-9818-832b8bb803d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.366821] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451255, 'name': Rename_Task, 'duration_secs': 0.358281} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.368411] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 655.368835] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 655.368835] env[62383]: value = "task-2451258" [ 655.368835] env[62383]: _type = "Task" [ 655.368835] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.369030] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7fb09e2d-7374-4e23-a603-0abdbb8e2a93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.378281] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451258, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.379502] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 655.379502] env[62383]: value = "task-2451259" [ 655.379502] env[62383]: _type = "Task" [ 655.379502] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.386977] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451259, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.569945] env[62383]: DEBUG oslo_vmware.api [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451256, 'name': PowerOffVM_Task, 'duration_secs': 0.181609} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.570288] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 655.570460] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 655.571417] env[62383]: DEBUG nova.network.neutron [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.573375] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bdb72eb-4110-488d-9690-a768929767b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.637517] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 655.638119] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 655.638554] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Deleting the datastore file [datastore2] 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 655.639223] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9aef266b-4df2-48c5-9026-78b1db6b383d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.650021] env[62383]: DEBUG oslo_vmware.api [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for the task: (returnval){ [ 655.650021] env[62383]: value = "task-2451261" [ 655.650021] env[62383]: _type = "Task" [ 655.650021] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.655204] env[62383]: DEBUG oslo_vmware.api [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451261, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.687876] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.212s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.690329] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.911s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.692243] env[62383]: INFO nova.compute.claims [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.722084] env[62383]: INFO nova.scheduler.client.report [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Deleted allocations for instance 0d992155-24fa-4836-83c9-8f188f7d7efa [ 655.755435] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "0f48434f-859f-4910-883f-2f81be647bad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.755892] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "0f48434f-859f-4910-883f-2f81be647bad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.818359] env[62383]: DEBUG nova.network.neutron [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Updating instance_info_cache with network_info: [{"id": "d251f129-de40-462a-86b9-50939d1a57c2", "address": "fa:16:3e:bb:fb:e3", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd251f129-de", "ovs_interfaceid": "d251f129-de40-462a-86b9-50939d1a57c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.824987] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451257, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058701} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.825421] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 655.826313] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6005eb4f-ad42-43c5-b787-df5b1575e013 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.851294] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] 872ac212-9f29-426d-94c7-e1bf73aebd94/872ac212-9f29-426d-94c7-e1bf73aebd94.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 655.852219] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f01ded4-186c-4fe2-9ac6-a4b3e5da083e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.871886] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 655.871886] env[62383]: value = "task-2451262" [ 655.871886] env[62383]: _type = "Task" [ 655.871886] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.882984] env[62383]: DEBUG oslo_vmware.api [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451258, 'name': PowerOnVM_Task, 'duration_secs': 0.428376} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.889539] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 655.891354] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451262, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.892363] env[62383]: DEBUG nova.compute.manager [None req-e56ac0fc-0538-4546-aaff-f9fb83b5113b tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 655.893344] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5b9e4e-0914-44e0-99b8-09ec85a14a84 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.903590] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451259, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.156867] env[62383]: DEBUG oslo_vmware.api [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Task: {'id': task-2451261, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137188} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.157150] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 656.157351] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 656.157529] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 656.157706] env[62383]: INFO nova.compute.manager [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Took 1.12 seconds to destroy the instance on the hypervisor. [ 656.158108] env[62383]: DEBUG oslo.service.loopingcall [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.158184] env[62383]: DEBUG nova.compute.manager [-] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 656.158231] env[62383]: DEBUG nova.network.neutron [-] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 656.228194] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b842920e-c911-4b76-a351-1858e7397556 tempest-ServerDiagnosticsV248Test-449558439 tempest-ServerDiagnosticsV248Test-449558439-project-member] Lock "0d992155-24fa-4836-83c9-8f188f7d7efa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.815s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.322525] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Releasing lock "refresh_cache-9c2c55a9-5b24-4d52-8d6b-666609349a3a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.323176] env[62383]: DEBUG nova.compute.manager [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Instance network_info: |[{"id": "d251f129-de40-462a-86b9-50939d1a57c2", "address": "fa:16:3e:bb:fb:e3", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd251f129-de", "ovs_interfaceid": "d251f129-de40-462a-86b9-50939d1a57c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 656.323779] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:fb:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd251f129-de40-462a-86b9-50939d1a57c2', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} 
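The repeated "Waiting for the task ... progress is N% ... completed successfully" sequences throughout this log come from polling vCenter tasks (CopyVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task, and so on) until they reach a terminal state. The sketch below is a simplified, hypothetical version of that polling loop, not the oslo.vmware implementation; poll_task_info and its attributes are assumptions made for the example.

    # Hypothetical simplification of the task-wait loop reflected in the log.
    import logging
    import time

    LOG = logging.getLogger(__name__)


    def wait_for_task(poll_task_info, task_id, poll_interval=0.5):
        """Poll a vCenter task until it finishes, logging progress.

        poll_task_info is assumed to return an object with .state
        ('running', 'success' or 'error'), .progress and .error_msg.
        """
        while True:
            info = poll_task_info(task_id)
            if info.state == 'success':
                LOG.debug("Task %s completed successfully.", task_id)
                return info
            if info.state == 'error':
                raise RuntimeError("Task %s failed: %s" % (task_id, info.error_msg))
            LOG.debug("Task %s progress is %s%%.", task_id, info.progress)
            time.sleep(poll_interval)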
[ 656.338920] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Creating folder: Project (2ba1830673394f88a05df80fdba83729). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.339270] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49feb68d-84ab-4736-b5db-43d0813898fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.348913] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Created folder: Project (2ba1830673394f88a05df80fdba83729) in parent group-v496304. [ 656.349099] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Creating folder: Instances. Parent ref: group-v496393. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.349323] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9d43951-b5b4-42b2-b4ad-c66643c3d177 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.359089] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Created folder: Instances in parent group-v496393. [ 656.359333] env[62383]: DEBUG oslo.service.loopingcall [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.359543] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 656.359761] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aaae1068-1d6d-440b-947d-054057fe1f41 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.385412] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451262, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.388974] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.388974] env[62383]: value = "task-2451265" [ 656.388974] env[62383]: _type = "Task" [ 656.388974] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.395235] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451259, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.402089] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451265, 'name': CreateVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.595127] env[62383]: DEBUG nova.compute.manager [req-fe31d227-9b9e-4807-9497-20674fc73eef req-a2711a53-ccd5-429a-a292-66cd9e47659f service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Received event network-changed-d251f129-de40-462a-86b9-50939d1a57c2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 656.595315] env[62383]: DEBUG nova.compute.manager [req-fe31d227-9b9e-4807-9497-20674fc73eef req-a2711a53-ccd5-429a-a292-66cd9e47659f service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Refreshing instance network info cache due to event network-changed-d251f129-de40-462a-86b9-50939d1a57c2. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 656.596722] env[62383]: DEBUG oslo_concurrency.lockutils [req-fe31d227-9b9e-4807-9497-20674fc73eef req-a2711a53-ccd5-429a-a292-66cd9e47659f service nova] Acquiring lock "refresh_cache-9c2c55a9-5b24-4d52-8d6b-666609349a3a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.596958] env[62383]: DEBUG oslo_concurrency.lockutils [req-fe31d227-9b9e-4807-9497-20674fc73eef req-a2711a53-ccd5-429a-a292-66cd9e47659f service nova] Acquired lock "refresh_cache-9c2c55a9-5b24-4d52-8d6b-666609349a3a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.597728] env[62383]: DEBUG nova.network.neutron [req-fe31d227-9b9e-4807-9497-20674fc73eef req-a2711a53-ccd5-429a-a292-66cd9e47659f service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Refreshing network info cache for port d251f129-de40-462a-86b9-50939d1a57c2 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 656.889016] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451262, 'name': ReconfigVM_Task, 'duration_secs': 0.72616} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.892509] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Reconfigured VM instance instance-00000021 to attach disk [datastore2] 872ac212-9f29-426d-94c7-e1bf73aebd94/872ac212-9f29-426d-94c7-e1bf73aebd94.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 656.894349] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ead00f2e-34a1-44d0-abf3-a8cc1c23538a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.904121] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451259, 'name': PowerOnVM_Task, 'duration_secs': 1.174277} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.907031] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.907185] env[62383]: INFO nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Took 9.22 seconds to spawn the instance on the hypervisor. [ 656.907367] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 656.907593] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451265, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.907852] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 656.907852] env[62383]: value = "task-2451266" [ 656.907852] env[62383]: _type = "Task" [ 656.907852] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.910995] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0c0142-3f31-4e75-b57d-1c9297444094 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.927257] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451266, 'name': Rename_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.099351] env[62383]: DEBUG nova.network.neutron [-] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.279900] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a112992-e45d-482a-a2db-b97fa0b1d477 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.288119] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4902b9f-f3c5-40aa-9437-e3356e77281b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.323994] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441a4eb1-a66d-45d5-a6d1-58774a61b631 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.332061] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ff06ca-f0ff-4d00-80c9-7e8ed738917f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.348864] env[62383]: DEBUG nova.compute.provider_tree [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.402838] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451265, 'name': CreateVM_Task, 'duration_secs': 0.893295} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.404943] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 657.406085] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.406085] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.406085] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 657.406613] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2be946c3-6eb7-4dcc-bcbd-545a833197b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.411409] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 657.411409] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527c2a90-64b1-5800-fee1-00388ee42162" [ 657.411409] env[62383]: _type = "Task" [ 657.411409] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.421914] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527c2a90-64b1-5800-fee1-00388ee42162, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.425663] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451266, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.435790] env[62383]: INFO nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Took 40.24 seconds to build instance. 
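The recurring "progress is N%" and "'duration_secs': X} completed successfully" lines in this excerpt all come from the same place: every vCenter operation the driver issues (CreateVM_Task, ReconfigVM_Task, PowerOnVM_Task, SearchDatastore_Task, ...) returns a Task reference, and oslo.vmware polls it until it reports success or error. The sketch below is a minimal, self-contained stand-in for that loop, not the real oslo_vmware.api code; `get_task_info` is a hypothetical callable that plays the role of reading the task's state via the property collector.

```python
import time

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it reports success or error.

    `get_task_info` is a hypothetical callable returning (state, progress);
    in the real driver this role is played by oslo_vmware.api, which reads
    the task's TaskInfo through the vSphere property collector.
    """
    started = time.monotonic()
    while True:
        state, progress = get_task_info()
        if state == "success":
            # corresponds to "... 'duration_secs': X} completed successfully."
            return time.monotonic() - started
        if state == "error":
            raise RuntimeError("task failed")
        print(f"progress is {progress}%")   # the repeated DEBUG polling lines
        time.sleep(poll_interval)

# Fake task that finishes on the third poll, just to exercise the loop.
_states = iter([("running", 6), ("running", 99), ("success", 100)])
print(f"duration_secs: {wait_for_task(lambda: next(_states), poll_interval=0):.3f}")
```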
[ 657.527069] env[62383]: DEBUG nova.network.neutron [req-fe31d227-9b9e-4807-9497-20674fc73eef req-a2711a53-ccd5-429a-a292-66cd9e47659f service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Updated VIF entry in instance network info cache for port d251f129-de40-462a-86b9-50939d1a57c2. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 657.527069] env[62383]: DEBUG nova.network.neutron [req-fe31d227-9b9e-4807-9497-20674fc73eef req-a2711a53-ccd5-429a-a292-66cd9e47659f service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Updating instance_info_cache with network_info: [{"id": "d251f129-de40-462a-86b9-50939d1a57c2", "address": "fa:16:3e:bb:fb:e3", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd251f129-de", "ovs_interfaceid": "d251f129-de40-462a-86b9-50939d1a57c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.601823] env[62383]: INFO nova.compute.manager [-] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Took 1.44 seconds to deallocate network for instance. [ 657.852518] env[62383]: DEBUG nova.scheduler.client.report [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 657.928107] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527c2a90-64b1-5800-fee1-00388ee42162, 'name': SearchDatastore_Task, 'duration_secs': 0.032689} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.932371] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.933140] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.933676] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.933917] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.934215] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 657.934605] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451266, 'name': Rename_Task, 'duration_secs': 0.908552} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.934872] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc66a50e-5872-493d-b1c9-69d41a5f1699 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.937107] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.938076] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.978s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.938782] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54c82956-2616-4d63-aae5-0655a0432dc0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.951646] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 657.951646] env[62383]: value = "task-2451267" [ 657.951646] env[62383]: _type = "Task" [ 657.951646] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.952135] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 657.952135] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 657.954682] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84d868a0-0a7a-4c56-ac80-ae5439366c68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.965107] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 657.965107] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b59591-bb0e-f608-da74-693af6fafa80" [ 657.965107] env[62383]: _type = "Task" [ 657.965107] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.971380] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451267, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.982736] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b59591-bb0e-f608-da74-693af6fafa80, 'name': SearchDatastore_Task, 'duration_secs': 0.009423} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.983555] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9757d509-ff3f-4700-abbd-d2f3c7c77b1b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.988586] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 657.988586] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b73406-7bcc-3247-1ab5-01fc7ec5cb72" [ 657.988586] env[62383]: _type = "Task" [ 657.988586] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.996724] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b73406-7bcc-3247-1ab5-01fc7ec5cb72, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.028703] env[62383]: DEBUG oslo_concurrency.lockutils [req-fe31d227-9b9e-4807-9497-20674fc73eef req-a2711a53-ccd5-429a-a292-66cd9e47659f service nova] Releasing lock "refresh_cache-9c2c55a9-5b24-4d52-8d6b-666609349a3a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.109367] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.363165] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.673s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.363723] env[62383]: DEBUG nova.compute.manager [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 658.366340] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.590s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.366562] env[62383]: DEBUG nova.objects.instance [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 658.444723] env[62383]: DEBUG nova.compute.manager [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 658.463787] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451267, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.502369] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b73406-7bcc-3247-1ab5-01fc7ec5cb72, 'name': SearchDatastore_Task, 'duration_secs': 0.00806} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.502727] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.503050] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9c2c55a9-5b24-4d52-8d6b-666609349a3a/9c2c55a9-5b24-4d52-8d6b-666609349a3a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 658.503652] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebb45dce-a43c-478d-aadf-3da3568a315b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.514022] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 658.514022] env[62383]: value = "task-2451268" [ 658.514022] env[62383]: _type = "Task" [ 658.514022] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.519359] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451268, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.570036] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquiring lock "3f508af0-68a2-4898-b9ae-d84cdb8a4cd9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.570293] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lock "3f508af0-68a2-4898-b9ae-d84cdb8a4cd9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.623107] env[62383]: DEBUG nova.compute.manager [req-9dc86b9a-486c-4bb3-be21-09985dc76ca3 req-f7abf23a-ce3e-4208-9473-7c25fc044886 service nova] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Received event network-vif-deleted-c5143583-b4ea-45e7-9c76-40bb80e9b004 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 658.870789] env[62383]: DEBUG nova.compute.utils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 658.872267] env[62383]: DEBUG nova.compute.manager [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 658.872443] env[62383]: DEBUG nova.network.neutron [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.966724] env[62383]: DEBUG oslo_vmware.api [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451267, 'name': PowerOnVM_Task, 'duration_secs': 0.694762} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.967079] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.967248] env[62383]: INFO nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Took 8.46 seconds to spawn the instance on the hypervisor. 
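The lines above trace the driver's image-cache flow for instance 9c2c55a9-...: take a lock named after the cached image path ("[datastore2] devstack-image-cache_base/cac3b430-..."), check with SearchDatastore_Task whether the cached VMDK already exists, create the cache directory if needed, release the lock, and then copy the cached disk into the instance's own folder with CopyVirtualDisk_Task. The sketch below only reproduces the shape of that flow on a local filesystem as an illustration; the real driver issues datastore API calls rather than shutil copies, and its locks come from oslo.concurrency, not threading. All names here (prepare_root_disk, _image_locks) are illustrative, not Nova's actual code.

```python
import shutil
import threading
from pathlib import Path

# One lock per cached image, mirroring the
# "[datastore2] devstack-image-cache_base/<image-id>" lock names in the log.
_image_locks: dict[str, threading.Lock] = {}

def prepare_root_disk(cache_dir: Path, instance_dir: Path, image_id: str) -> Path:
    """Local-filesystem sketch of the cache-check-then-copy flow above."""
    lock = _image_locks.setdefault(image_id, threading.Lock())
    with lock:
        cached = cache_dir / image_id / f"{image_id}.vmdk"
        if not cached.exists():            # stands in for SearchDatastore_Task
            raise FileNotFoundError(
                f"{image_id} not in cache; the driver would fetch it here")
    # The lock is released before the copy, as in the log ("Releasing lock ... .vmdk"
    # immediately before CopyVirtualDisk_Task is invoked).
    instance_dir.mkdir(parents=True, exist_ok=True)   # cf. FileManager.MakeDirectory
    root_disk = instance_dir / f"{instance_dir.name}.vmdk"
    shutil.copyfile(cached, root_disk)                # stands in for CopyVirtualDisk_Task
    return root_disk
```

Keying the lock on the cached image path rather than the instance UUID is what lets concurrent builds of the same image serialize on the cache check while builds of different images proceed independently.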
[ 658.967465] env[62383]: DEBUG nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 658.968283] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6bfbfa-0365-4222-8fcb-468e22fee373 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.971995] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 658.990771] env[62383]: DEBUG nova.policy [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '313394d2b7a24a20b12b72f37fd07c1f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ba1830673394f88a05df80fdba83729', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 659.029842] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451268, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480308} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.029842] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9c2c55a9-5b24-4d52-8d6b-666609349a3a/9c2c55a9-5b24-4d52-8d6b-666609349a3a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 659.029842] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 659.029842] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd80a8d9-df09-47b3-841f-3bf41077d76e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.036692] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 659.036692] env[62383]: value = "task-2451269" [ 659.036692] env[62383]: _type = "Task" [ 659.036692] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.042837] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451269, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.283756] env[62383]: INFO nova.compute.manager [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Rescuing [ 659.283756] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.283756] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.283756] env[62383]: DEBUG nova.network.neutron [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 659.378511] env[62383]: DEBUG nova.compute.manager [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 659.381708] env[62383]: DEBUG nova.network.neutron [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Successfully created port: 996369ec-24dc-43dd-8380-b1f7a35e6557 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.384522] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02de4812-b02a-4fe7-878d-257b1fb8a799 tempest-ServersAdmin275Test-138200586 tempest-ServersAdmin275Test-138200586-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.385768] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.403s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.387578] env[62383]: INFO nova.compute.claims [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.488462] env[62383]: INFO nova.compute.manager [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Took 41.08 seconds to build instance. [ 659.549768] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451269, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108875} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.551834] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 659.553191] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb738738-783e-406c-a9b7-00ebf5b830bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.578351] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] 9c2c55a9-5b24-4d52-8d6b-666609349a3a/9c2c55a9-5b24-4d52-8d6b-666609349a3a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 659.578351] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c882cf80-3ac5-48cc-8eee-069438def661 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.601959] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 659.601959] env[62383]: value = "task-2451270" [ 659.601959] env[62383]: _type = "Task" [ 659.601959] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.611317] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451270, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.977224] env[62383]: DEBUG nova.network.neutron [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Updating instance_info_cache with network_info: [{"id": "3c2cbd45-1a44-495a-bfe1-6e6f90985ded", "address": "fa:16:3e:2a:51:61", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c2cbd45-1a", "ovs_interfaceid": "3c2cbd45-1a44-495a-bfe1-6e6f90985ded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.989993] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f549a03-627f-40cf-a6c6-da2661961fdc tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "872ac212-9f29-426d-94c7-e1bf73aebd94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.969s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.111961] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451270, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.392311] env[62383]: DEBUG nova.compute.manager [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 660.418056] env[62383]: DEBUG nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 660.418290] env[62383]: DEBUG nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.418451] env[62383]: DEBUG nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 660.418811] env[62383]: DEBUG nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.418917] env[62383]: DEBUG nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 660.419131] env[62383]: DEBUG nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 660.419452] env[62383]: DEBUG nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 660.419674] env[62383]: DEBUG nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 660.419869] env[62383]: DEBUG 
nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 660.420112] env[62383]: DEBUG nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 660.420270] env[62383]: DEBUG nova.virt.hardware [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 660.421272] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1920e0-6d9c-43d6-bce4-89cb5ad0e09b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.432989] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d18643-02dc-463b-a511-29a939f3de35 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.480185] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.492438] env[62383]: DEBUG nova.compute.manager [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 660.519879] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "330b5e35-3292-4df7-b288-547b158e671a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.519946] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "330b5e35-3292-4df7-b288-547b158e671a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.520648] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "330b5e35-3292-4df7-b288-547b158e671a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.520648] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "330b5e35-3292-4df7-b288-547b158e671a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.520648] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "330b5e35-3292-4df7-b288-547b158e671a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.522561] env[62383]: INFO nova.compute.manager [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Terminating instance [ 660.613171] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451270, 'name': ReconfigVM_Task, 'duration_secs': 0.96394} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.615554] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Reconfigured VM instance instance-00000022 to attach disk [datastore2] 9c2c55a9-5b24-4d52-8d6b-666609349a3a/9c2c55a9-5b24-4d52-8d6b-666609349a3a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 660.616983] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fe82a76-7fbf-469c-a532-23aa9a48f730 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.622326] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 660.622326] env[62383]: value = "task-2451271" [ 660.622326] env[62383]: _type = "Task" [ 660.622326] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.631122] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451271, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.866684] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafd6904-000f-4dd8-9bc9-51028ac3a5f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.874184] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9111767b-aafb-4ec7-87aa-ff072cc9b955 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.905449] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ce1c04-147d-4450-80d4-9db455d0fd8c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.908896] env[62383]: DEBUG nova.compute.manager [req-8acf049f-974c-48de-bbd7-37d1efdf5f3a req-de45a9b1-f7f9-4b21-b060-f7c79a229cdc service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Received event network-vif-plugged-996369ec-24dc-43dd-8380-b1f7a35e6557 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 660.909114] env[62383]: DEBUG oslo_concurrency.lockutils [req-8acf049f-974c-48de-bbd7-37d1efdf5f3a req-de45a9b1-f7f9-4b21-b060-f7c79a229cdc service nova] Acquiring lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.909319] env[62383]: DEBUG oslo_concurrency.lockutils [req-8acf049f-974c-48de-bbd7-37d1efdf5f3a req-de45a9b1-f7f9-4b21-b060-f7c79a229cdc service nova] Lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.909505] env[62383]: DEBUG oslo_concurrency.lockutils [req-8acf049f-974c-48de-bbd7-37d1efdf5f3a req-de45a9b1-f7f9-4b21-b060-f7c79a229cdc service nova] Lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.909674] env[62383]: DEBUG nova.compute.manager [req-8acf049f-974c-48de-bbd7-37d1efdf5f3a req-de45a9b1-f7f9-4b21-b060-f7c79a229cdc service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] No waiting events found dispatching network-vif-plugged-996369ec-24dc-43dd-8380-b1f7a35e6557 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 660.909845] env[62383]: WARNING nova.compute.manager [req-8acf049f-974c-48de-bbd7-37d1efdf5f3a req-de45a9b1-f7f9-4b21-b060-f7c79a229cdc service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Received unexpected event network-vif-plugged-996369ec-24dc-43dd-8380-b1f7a35e6557 for instance with vm_state building and task_state spawning. [ 660.915547] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0e729b-e8e4-4b5e-896e-ab6c0c2dd895 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.929201] env[62383]: DEBUG nova.compute.provider_tree [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.999683] env[62383]: DEBUG nova.network.neutron [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Successfully updated port: 996369ec-24dc-43dd-8380-b1f7a35e6557 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.016819] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.030790] env[62383]: DEBUG nova.compute.manager [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 661.030993] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 661.031853] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51963e8-da76-4177-9e90-92c2be79586a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.042725] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 661.043070] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff284848-c633-4d81-ad02-2f069c9e8c7d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.049549] env[62383]: DEBUG oslo_vmware.api [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 661.049549] env[62383]: value = "task-2451272" [ 661.049549] env[62383]: _type = "Task" [ 661.049549] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.057385] env[62383]: DEBUG oslo_vmware.api [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451272, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.131763] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451271, 'name': Rename_Task, 'duration_secs': 0.130365} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.132040] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 661.132288] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f639eae6-8e49-460a-846d-67ccaea179e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.138501] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 661.138501] env[62383]: value = "task-2451273" [ 661.138501] env[62383]: _type = "Task" [ 661.138501] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.146699] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451273, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.432357] env[62383]: DEBUG nova.scheduler.client.report [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 661.503841] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "refresh_cache-f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.503959] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquired lock "refresh_cache-f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.504153] env[62383]: DEBUG nova.network.neutron [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.561060] env[62383]: DEBUG oslo_vmware.api [None 
req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451272, 'name': PowerOffVM_Task, 'duration_secs': 0.229445} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.561060] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 661.561060] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 661.561060] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a41313f-5bfb-43d3-8c37-14a966e74082 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.623286] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 661.623513] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 661.623698] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Deleting the datastore file [datastore2] 330b5e35-3292-4df7-b288-547b158e671a {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 661.623966] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-429c6bd8-692d-428e-8718-beca38d0a683 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.631376] env[62383]: DEBUG oslo_vmware.api [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 661.631376] env[62383]: value = "task-2451275" [ 661.631376] env[62383]: _type = "Task" [ 661.631376] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.639646] env[62383]: DEBUG oslo_vmware.api [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451275, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.648306] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451273, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.938068] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.552s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.938694] env[62383]: DEBUG nova.compute.manager [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 661.941366] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.874s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.941580] env[62383]: DEBUG nova.objects.instance [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lazy-loading 'resources' on Instance uuid 69569fa0-5175-453e-9875-9ef46c723da8 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 662.018994] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 662.019305] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1a9ece3-fd07-4e8a-aa73-b59627a00563 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.028317] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 662.028317] env[62383]: value = "task-2451276" [ 662.028317] env[62383]: _type = "Task" [ 662.028317] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.037503] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451276, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.038781] env[62383]: DEBUG nova.network.neutron [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.141136] env[62383]: DEBUG oslo_vmware.api [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451275, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14828} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.144225] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 662.144418] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 662.144597] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 662.144771] env[62383]: INFO nova.compute.manager [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 662.145033] env[62383]: DEBUG oslo.service.loopingcall [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.145245] env[62383]: DEBUG nova.compute.manager [-] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 662.145358] env[62383]: DEBUG nova.network.neutron [-] [instance: 330b5e35-3292-4df7-b288-547b158e671a] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 662.152134] env[62383]: DEBUG oslo_vmware.api [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451273, 'name': PowerOnVM_Task, 'duration_secs': 0.638959} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.153210] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 662.153210] env[62383]: INFO nova.compute.manager [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Took 8.83 seconds to spawn the instance on the hypervisor. [ 662.153210] env[62383]: DEBUG nova.compute.manager [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 662.153472] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85d8dad-7506-400d-b5a1-b7415dd87c69 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.207876] env[62383]: DEBUG nova.network.neutron [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Updating instance_info_cache with network_info: [{"id": "996369ec-24dc-43dd-8380-b1f7a35e6557", "address": "fa:16:3e:ea:59:2c", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap996369ec-24", "ovs_interfaceid": "996369ec-24dc-43dd-8380-b1f7a35e6557", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.444554] env[62383]: DEBUG nova.compute.utils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 662.450707] env[62383]: DEBUG nova.compute.manager [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Allocating 
IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 662.450707] env[62383]: DEBUG nova.network.neutron [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 662.493942] env[62383]: DEBUG nova.policy [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d1b5b539901484bb6a18e9b3b462c6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbf8bc2ab78b4eedaca2c1ee32de8b27', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 662.540842] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451276, 'name': PowerOffVM_Task, 'duration_secs': 0.29163} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.541115] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 662.542097] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee97bb55-4d7b-4922-afa1-ff67ebb0c686 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.565168] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16b3fae-bbae-4e55-93e5-2f7f56710773 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.582357] env[62383]: DEBUG nova.compute.manager [req-1adbbca7-3ee4-4df7-9143-60db81b08189 req-ec82740f-893e-445d-bef4-44dd691e5737 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Received event network-vif-deleted-7d1b1f36-5137-4415-a93a-9ca34e00706d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 662.582547] env[62383]: INFO nova.compute.manager [req-1adbbca7-3ee4-4df7-9143-60db81b08189 req-ec82740f-893e-445d-bef4-44dd691e5737 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Neutron deleted interface 7d1b1f36-5137-4415-a93a-9ca34e00706d; detaching it from the instance and deleting it from the info cache [ 662.582710] env[62383]: DEBUG nova.network.neutron [req-1adbbca7-3ee4-4df7-9143-60db81b08189 req-ec82740f-893e-445d-bef4-44dd691e5737 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Updating instance_info_cache with network_info: [] {{(pid=62383) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.606792] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 662.607264] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81cfd220-379e-4caa-9768-aafcc1945c22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.614181] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 662.614181] env[62383]: value = "task-2451277" [ 662.614181] env[62383]: _type = "Task" [ 662.614181] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.623685] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451277, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.669610] env[62383]: INFO nova.compute.manager [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Took 40.99 seconds to build instance. 
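
[editor's note] The records above repeat one pattern: a vSphere *_Task method (Rename_Task, PowerOffVM_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) is invoked through the oslo.vmware session, and wait_for_task() then polls it, producing the "Waiting for the task ... progress is N% ... completed successfully" lines. A minimal illustrative sketch of that pattern follows; it is not the Nova driver's actual code, the host and credentials are placeholders, and the exact constructor argument order is an assumption.

# Illustrative sketch of the invoke/poll pattern seen in the log above.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc1.example.test',    # vCenter host (placeholder)
    'user@vsphere.local',  # username (placeholder)
    'secret',              # password (placeholder)
    10,                    # api_retry_count (assumed positional order)
    0.5)                   # task_poll_interval in seconds

# Look up the VM by instance UUID; the log shows SearchIndex.FindAllByUuid
# being used for this. The UUID here is copied from the log records above.
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='9c2c55a9-5b24-4d52-8d6b-666609349a3a',
    vmSearch=True, instanceUuid=True)
vm_ref = vm_refs[0]

# Start the asynchronous task and block until it finishes. wait_for_task()
# is what emits the periodic "progress is N%" polling seen in the log and
# raises if the task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' when the task completed
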
[ 662.710638] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Releasing lock "refresh_cache-f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.710975] env[62383]: DEBUG nova.compute.manager [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Instance network_info: |[{"id": "996369ec-24dc-43dd-8380-b1f7a35e6557", "address": "fa:16:3e:ea:59:2c", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap996369ec-24", "ovs_interfaceid": "996369ec-24dc-43dd-8380-b1f7a35e6557", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 662.712276] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:59:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '996369ec-24dc-43dd-8380-b1f7a35e6557', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.719336] env[62383]: DEBUG oslo.service.loopingcall [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.726896] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 662.729024] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9792d12-d408-4b6c-b631-8f46d101f715 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.758359] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 662.758359] env[62383]: value = "task-2451278" [ 662.758359] env[62383]: _type = "Task" [ 662.758359] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.769211] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451278, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.775123] env[62383]: DEBUG nova.network.neutron [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Successfully created port: 0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.952289] env[62383]: DEBUG nova.compute.manager [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 662.955934] env[62383]: DEBUG nova.network.neutron [-] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.063151] env[62383]: DEBUG nova.compute.manager [req-01b4e5fe-9cbc-47e9-af25-eaac3c476883 req-186005ad-25b5-42a2-99f0-b8bbb329cc22 service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Received event network-changed-996369ec-24dc-43dd-8380-b1f7a35e6557 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 663.063447] env[62383]: DEBUG nova.compute.manager [req-01b4e5fe-9cbc-47e9-af25-eaac3c476883 req-186005ad-25b5-42a2-99f0-b8bbb329cc22 service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Refreshing instance network info cache due to event network-changed-996369ec-24dc-43dd-8380-b1f7a35e6557. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 663.063732] env[62383]: DEBUG oslo_concurrency.lockutils [req-01b4e5fe-9cbc-47e9-af25-eaac3c476883 req-186005ad-25b5-42a2-99f0-b8bbb329cc22 service nova] Acquiring lock "refresh_cache-f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.064690] env[62383]: DEBUG oslo_concurrency.lockutils [req-01b4e5fe-9cbc-47e9-af25-eaac3c476883 req-186005ad-25b5-42a2-99f0-b8bbb329cc22 service nova] Acquired lock "refresh_cache-f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.064690] env[62383]: DEBUG nova.network.neutron [req-01b4e5fe-9cbc-47e9-af25-eaac3c476883 req-186005ad-25b5-42a2-99f0-b8bbb329cc22 service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Refreshing network info cache for port 996369ec-24dc-43dd-8380-b1f7a35e6557 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 663.067495] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2122de36-c039-4b1e-b9ee-b9869f7a0078 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.076258] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0824d8-480c-47c2-99e3-ee62a1b148bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.111967] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10180f10-5b26-4d5f-80ed-62a1cef28147 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.114632] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f163a95-a76d-49eb-b36f-5764aaa74484 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.129779] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0dfc4a5-a6a3-416d-bd69-8518616761fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.146291] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17cc2cb-4778-48a2-b836-72ef36dcf389 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.150589] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 663.150800] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.151049] env[62383]: DEBUG 
oslo_concurrency.lockutils [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.151201] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.151420] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.151915] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cc6428b-b463-4e83-9e4d-40edf52abf9e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.162717] env[62383]: DEBUG nova.compute.provider_tree [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.177236] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03d9f9c4-f371-4acd-896c-1ae98db060eb tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.295s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.177632] env[62383]: DEBUG nova.compute.manager [req-1adbbca7-3ee4-4df7-9143-60db81b08189 req-ec82740f-893e-445d-bef4-44dd691e5737 service nova] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Detach interface failed, port_id=7d1b1f36-5137-4415-a93a-9ca34e00706d, reason: Instance 330b5e35-3292-4df7-b288-547b158e671a could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 663.184149] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.184390] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 663.185200] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9663b0a-8f2c-46b2-a219-46bbc06b6f4a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.191610] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 663.191610] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e746ba-3b73-4dd0-3959-6f70487c0a93" [ 663.191610] env[62383]: _type = "Task" [ 663.191610] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.202728] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e746ba-3b73-4dd0-3959-6f70487c0a93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.269259] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451278, 'name': CreateVM_Task, 'duration_secs': 0.466086} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.269464] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 663.270190] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.270359] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.270695] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 663.270953] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a669a5dd-7d80-4e50-93d4-6ce57f00c477 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.275285] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: 
(returnval){ [ 663.275285] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5257a859-84b6-1dc7-9707-b01d56f371e1" [ 663.275285] env[62383]: _type = "Task" [ 663.275285] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.282723] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5257a859-84b6-1dc7-9707-b01d56f371e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.458396] env[62383]: INFO nova.compute.manager [-] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Took 1.31 seconds to deallocate network for instance. [ 663.680192] env[62383]: DEBUG nova.scheduler.client.report [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 663.685305] env[62383]: DEBUG nova.compute.manager [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 663.705658] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e746ba-3b73-4dd0-3959-6f70487c0a93, 'name': SearchDatastore_Task, 'duration_secs': 0.010575} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.707113] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd686be8-df54-4f3c-8d98-7fceec5952aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.713019] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 663.713019] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524c9ac7-7128-50bc-c02c-4a581b4f1704" [ 663.713019] env[62383]: _type = "Task" [ 663.713019] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.720978] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524c9ac7-7128-50bc-c02c-4a581b4f1704, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.788531] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5257a859-84b6-1dc7-9707-b01d56f371e1, 'name': SearchDatastore_Task, 'duration_secs': 0.010963} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.788660] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.788845] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.789076] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.789220] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.789393] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.789654] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e732522-a02e-4ae1-91f0-aaf8434b063f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.800980] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.801185] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 663.802094] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36df132a-d8a5-4811-aeec-d9beaafaf57c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.808635] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 663.808635] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52868121-1cc9-a450-c432-857fdb67fece" [ 663.808635] env[62383]: _type = "Task" [ 663.808635] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.816030] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52868121-1cc9-a450-c432-857fdb67fece, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.893229] env[62383]: DEBUG nova.network.neutron [req-01b4e5fe-9cbc-47e9-af25-eaac3c476883 req-186005ad-25b5-42a2-99f0-b8bbb329cc22 service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Updated VIF entry in instance network info cache for port 996369ec-24dc-43dd-8380-b1f7a35e6557. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 663.893229] env[62383]: DEBUG nova.network.neutron [req-01b4e5fe-9cbc-47e9-af25-eaac3c476883 req-186005ad-25b5-42a2-99f0-b8bbb329cc22 service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Updating instance_info_cache with network_info: [{"id": "996369ec-24dc-43dd-8380-b1f7a35e6557", "address": "fa:16:3e:ea:59:2c", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap996369ec-24", "ovs_interfaceid": "996369ec-24dc-43dd-8380-b1f7a35e6557", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.960665] env[62383]: DEBUG nova.compute.manager [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 663.964919] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.989580] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 663.989811] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.989966] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 663.990164] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 663.990314] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 663.990457] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 663.990661] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 
tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 663.990819] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 663.990985] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 663.991297] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 663.991336] env[62383]: DEBUG nova.virt.hardware [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 663.992213] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c4e1f8-2d6c-476b-92d3-62c7da6ab237 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.000098] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7d445f-a026-446b-bec1-469bde2abe41 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.191375] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.249s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.193144] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.107s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.195439] env[62383]: INFO nova.compute.claims [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 664.212515] env[62383]: INFO 
nova.scheduler.client.report [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Deleted allocations for instance 69569fa0-5175-453e-9875-9ef46c723da8 [ 664.222156] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.232513] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524c9ac7-7128-50bc-c02c-4a581b4f1704, 'name': SearchDatastore_Task, 'duration_secs': 0.009964} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.232836] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.233145] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 152567ba-f24c-4674-b06e-98c76a3da324/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. {{(pid=62383) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 664.233420] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc0ebc20-aeaa-4275-b8f7-775f96507bc4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.241354] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 664.241354] env[62383]: value = "task-2451279" [ 664.241354] env[62383]: _type = "Task" [ 664.241354] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.253910] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451279, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.316323] env[62383]: DEBUG nova.network.neutron [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Successfully updated port: 0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 664.321988] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52868121-1cc9-a450-c432-857fdb67fece, 'name': SearchDatastore_Task, 'duration_secs': 0.032383} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.323909] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48f9eeb2-0cdd-4b97-9b51-1678d21d331b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.330524] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 664.330524] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525a25c9-64b1-d490-e1e5-bd6e5c1ad506" [ 664.330524] env[62383]: _type = "Task" [ 664.330524] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.342336] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525a25c9-64b1-d490-e1e5-bd6e5c1ad506, 'name': SearchDatastore_Task, 'duration_secs': 0.009135} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.343044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.343383] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf/f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 664.343718] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-711417d0-58eb-4174-a5e1-8303a2c01654 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.351069] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 664.351069] env[62383]: value = "task-2451280" [ 664.351069] env[62383]: _type = "Task" [ 664.351069] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.360332] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451280, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.395676] env[62383]: DEBUG oslo_concurrency.lockutils [req-01b4e5fe-9cbc-47e9-af25-eaac3c476883 req-186005ad-25b5-42a2-99f0-b8bbb329cc22 service nova] Releasing lock "refresh_cache-f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.683395] env[62383]: DEBUG nova.compute.manager [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Received event network-vif-plugged-0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 664.683628] env[62383]: DEBUG oslo_concurrency.lockutils [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] Acquiring lock "17498cb6-8b16-4a2e-96ae-c594966cee77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.683848] env[62383]: DEBUG oslo_concurrency.lockutils [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] Lock "17498cb6-8b16-4a2e-96ae-c594966cee77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.684015] env[62383]: DEBUG oslo_concurrency.lockutils [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] Lock "17498cb6-8b16-4a2e-96ae-c594966cee77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.684990] env[62383]: DEBUG nova.compute.manager [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] No waiting events found dispatching network-vif-plugged-0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 664.684990] env[62383]: WARNING nova.compute.manager [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Received unexpected event network-vif-plugged-0a1882ce-460b-4676-b041-6cc7defcf5f5 for instance with vm_state building and task_state spawning. [ 664.684990] env[62383]: DEBUG nova.compute.manager [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Received event network-changed-0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 664.684990] env[62383]: DEBUG nova.compute.manager [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Refreshing instance network info cache due to event network-changed-0a1882ce-460b-4676-b041-6cc7defcf5f5. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 664.685305] env[62383]: DEBUG oslo_concurrency.lockutils [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] Acquiring lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.685447] env[62383]: DEBUG oslo_concurrency.lockutils [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] Acquired lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.686095] env[62383]: DEBUG nova.network.neutron [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Refreshing network info cache for port 0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 664.729886] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2c2682c-bef2-4719-9896-e71442386225 tempest-ServersAdmin275Test-1537639133 tempest-ServersAdmin275Test-1537639133-project-member] Lock "69569fa0-5175-453e-9875-9ef46c723da8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.989s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.757697] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451279, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506977} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.758059] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 152567ba-f24c-4674-b06e-98c76a3da324/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. 
[ 664.759336] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e999a246-c63e-4afb-a8ae-40d8b124070a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.800936] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] 152567ba-f24c-4674-b06e-98c76a3da324/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.800936] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d836331-caee-47f7-9f0b-f6dfff88da5c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.820348] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 664.820348] env[62383]: value = "task-2451281" [ 664.820348] env[62383]: _type = "Task" [ 664.820348] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.824648] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquiring lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.832524] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451281, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.861394] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451280, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.229251] env[62383]: DEBUG nova.network.neutron [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 665.335769] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451281, 'name': ReconfigVM_Task, 'duration_secs': 0.427152} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.336585] env[62383]: DEBUG nova.network.neutron [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.340102] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Reconfigured VM instance instance-0000001d to attach disk [datastore2] 152567ba-f24c-4674-b06e-98c76a3da324/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.341593] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c90b89f-c309-4349-a912-bc1462e3433f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.377951] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43d551e5-5532-4524-bacf-2dbb44c0e0b0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.395252] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686659} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.398092] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf/f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 665.398092] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 665.398092] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 665.398092] env[62383]: value = "task-2451282" [ 665.398092] env[62383]: _type = "Task" [ 665.398092] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.398092] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8681948a-3d4b-4c08-9282-5f5f70e91442 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.411041] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451282, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.412402] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 665.412402] env[62383]: value = "task-2451283" [ 665.412402] env[62383]: _type = "Task" [ 665.412402] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.424172] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451283, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.839086] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83fae76-608e-428e-a0ad-ecd16e8aa3ff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.842137] env[62383]: DEBUG oslo_concurrency.lockutils [req-857a3dc6-78b5-46ae-adaa-80968753e6e3 req-ac5ef158-02cf-4d48-acc1-83fdf41c471a service nova] Releasing lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.842653] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquired lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.842847] env[62383]: DEBUG nova.network.neutron [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.849146] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67667cc4-e8ad-41e0-b3e9-272455448f4a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.889517] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24692b88-cac7-48a1-bcbc-cc30c03f9e40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
665.897152] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b7bc1c-174b-433c-b067-5519375b9d5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.914588] env[62383]: DEBUG nova.compute.provider_tree [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.922445] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451282, 'name': ReconfigVM_Task, 'duration_secs': 0.196365} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.922578] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 665.923561] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a70fd510-feee-437c-bbf4-08f827cc37fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.929478] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451283, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155908} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.930101] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 665.930869] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6391009-6e62-46a9-bc77-e9186c6c6535 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.934669] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 665.934669] env[62383]: value = "task-2451284" [ 665.934669] env[62383]: _type = "Task" [ 665.934669] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.955743] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf/f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.956747] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f47854d5-f9c2-4e43-affb-de5375d9d671 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.976839] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451284, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.982289] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 665.982289] env[62383]: value = "task-2451285" [ 665.982289] env[62383]: _type = "Task" [ 665.982289] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.989827] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451285, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.395845] env[62383]: DEBUG nova.network.neutron [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.427784] env[62383]: DEBUG nova.scheduler.client.report [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 666.447147] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451284, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.493980] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451285, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.696294] env[62383]: DEBUG nova.network.neutron [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Updating instance_info_cache with network_info: [{"id": "0a1882ce-460b-4676-b041-6cc7defcf5f5", "address": "fa:16:3e:9f:03:f6", "network": {"id": "2e7ea332-ca96-4fb8-9325-de5e215d98a9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1124068406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbf8bc2ab78b4eedaca2c1ee32de8b27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "edd47158-6f4b-44a1-8e82-0411205ad299", "external-id": "nsx-vlan-transportzone-587", "segmentation_id": 587, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a1882ce-46", "ovs_interfaceid": "0a1882ce-460b-4676-b041-6cc7defcf5f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.933776] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.741s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.934324] env[62383]: DEBUG nova.compute.manager [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 666.937146] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.867s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.938529] env[62383]: INFO nova.compute.claims [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 666.949952] env[62383]: DEBUG oslo_vmware.api [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451284, 'name': PowerOnVM_Task, 'duration_secs': 0.660021} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.949952] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 666.953862] env[62383]: DEBUG nova.compute.manager [None req-9d5b1e3d-a8b6-43e1-b0cd-2ee152db05ac tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 666.954013] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff616515-4b78-4c1a-bda7-7ce8a5008dfc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.999210] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451285, 'name': ReconfigVM_Task, 'duration_secs': 0.634515} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.999689] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Reconfigured VM instance instance-00000023 to attach disk [datastore1] f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf/f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 667.000412] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34907256-14f8-4507-b854-507525084bd7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.007165] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 667.007165] env[62383]: value = "task-2451286" [ 667.007165] env[62383]: _type = "Task" [ 667.007165] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.016290] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451286, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.199314] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Releasing lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.199643] env[62383]: DEBUG nova.compute.manager [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Instance network_info: |[{"id": "0a1882ce-460b-4676-b041-6cc7defcf5f5", "address": "fa:16:3e:9f:03:f6", "network": {"id": "2e7ea332-ca96-4fb8-9325-de5e215d98a9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1124068406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbf8bc2ab78b4eedaca2c1ee32de8b27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "edd47158-6f4b-44a1-8e82-0411205ad299", "external-id": "nsx-vlan-transportzone-587", "segmentation_id": 587, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a1882ce-46", "ovs_interfaceid": "0a1882ce-460b-4676-b041-6cc7defcf5f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 667.200156] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:03:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'edd47158-6f4b-44a1-8e82-0411205ad299', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a1882ce-460b-4676-b041-6cc7defcf5f5', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.210190] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Creating folder: Project (dbf8bc2ab78b4eedaca2c1ee32de8b27). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.210433] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93a0b18a-6beb-47ea-a470-165f2c81a3e7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.221283] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Created folder: Project (dbf8bc2ab78b4eedaca2c1ee32de8b27) in parent group-v496304. [ 667.221475] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Creating folder: Instances. Parent ref: group-v496397. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.221711] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52f9428e-a8d8-43fa-baab-3d4d77b00506 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.231590] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Created folder: Instances in parent group-v496397. [ 667.231826] env[62383]: DEBUG oslo.service.loopingcall [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 667.232014] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.232216] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed191253-943d-491a-807a-b065e55493ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.251141] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.251141] env[62383]: value = "task-2451289" [ 667.251141] env[62383]: _type = "Task" [ 667.251141] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.263984] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451289, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.443057] env[62383]: DEBUG nova.compute.utils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 667.447351] env[62383]: DEBUG nova.compute.manager [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 667.449890] env[62383]: DEBUG nova.network.neutron [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 667.507222] env[62383]: DEBUG nova.policy [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9c77afeb30749d09c8ee611adb97c09', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '89a4cd88e497492da719341b40576b18', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 667.523505] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451286, 'name': Rename_Task, 'duration_secs': 0.157819} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.523795] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 667.524106] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-789cfb0e-c214-49f9-b2ab-98351de6a7e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.531090] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 667.531090] env[62383]: value = "task-2451290" [ 667.531090] env[62383]: _type = "Task" [ 667.531090] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.539850] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.762159] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451289, 'name': CreateVM_Task, 'duration_secs': 0.407718} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.762531] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 667.763369] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.763726] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.764212] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 667.764600] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d42823b-d63f-4fc8-9b13-fce545add428 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.772387] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for the task: (returnval){ [ 667.772387] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5202c5e3-b27c-4497-0841-a13ad8dcc94e" [ 667.772387] env[62383]: _type = "Task" [ 667.772387] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.779912] env[62383]: INFO nova.compute.manager [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Unrescuing [ 667.780295] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.780533] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.781038] env[62383]: DEBUG nova.network.neutron [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 667.782360] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5202c5e3-b27c-4497-0841-a13ad8dcc94e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.948387] env[62383]: DEBUG nova.compute.manager [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 668.059647] env[62383]: DEBUG nova.network.neutron [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Successfully created port: c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 668.065330] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451290, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.280802] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5202c5e3-b27c-4497-0841-a13ad8dcc94e, 'name': SearchDatastore_Task, 'duration_secs': 0.015746} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.281120] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.281353] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 668.281580] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.281722] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.281899] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} 
[ 668.282173] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e59b7e60-4535-404b-8a4a-9ec221c5b206 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.292641] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 668.292928] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 668.293618] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89fae68d-bec9-4387-95ab-f97af9f996c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.300025] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for the task: (returnval){ [ 668.300025] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5243eb03-0748-4ffd-954d-000a1bf39fb3" [ 668.300025] env[62383]: _type = "Task" [ 668.300025] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.308928] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5243eb03-0748-4ffd-954d-000a1bf39fb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.548169] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451290, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.606820] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0e2db6-e704-41ec-811c-6df9f4c2bbdd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.615990] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514e7648-1ae1-4874-98f4-8f14b2efbc79 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.651855] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7022372-f388-4342-82d5-a55aa8967b1e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.660911] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18549674-c510-475c-917f-1d7b4d910d8d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.678211] env[62383]: DEBUG nova.compute.provider_tree [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 668.728951] env[62383]: DEBUG nova.network.neutron [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Updating instance_info_cache with network_info: [{"id": "3c2cbd45-1a44-495a-bfe1-6e6f90985ded", "address": "fa:16:3e:2a:51:61", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c2cbd45-1a", "ovs_interfaceid": "3c2cbd45-1a44-495a-bfe1-6e6f90985ded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.812838] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5243eb03-0748-4ffd-954d-000a1bf39fb3, 'name': SearchDatastore_Task, 'duration_secs': 0.012836} completed 
successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.817020] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eda2bd34-3622-4cad-a18d-1a76a2bc75e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.819912] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for the task: (returnval){ [ 668.819912] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52036bd0-1395-7cab-1f2c-bf32f9e3da62" [ 668.819912] env[62383]: _type = "Task" [ 668.819912] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.830825] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52036bd0-1395-7cab-1f2c-bf32f9e3da62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.968541] env[62383]: DEBUG nova.compute.manager [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 668.997034] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 668.997034] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 668.997034] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} 
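The nova.virt.hardware entries above record how the desired guest CPU topology is derived: with no flavor or image limits set (0:0:0), the limits default to 65536 sockets/cores/threads, and for a single-vCPU flavor the only valid topology is 1 socket x 1 core x 1 thread. A simplified sketch of that enumeration (not the actual nova.virt.hardware logic) is shown below.

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) combinations whose product equals vcpus."""
        for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    # For the m1.nano flavor above (vcpus=1) this yields a single topology: (1, 1, 1).
    print(list(possible_topologies(1)))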
[ 668.997350] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 668.997350] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 668.997350] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 668.997350] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 668.999232] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 668.999453] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 668.999745] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 668.999994] env[62383]: DEBUG nova.virt.hardware [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 669.001286] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8021cc-4deb-44d9-a4f5-e00a03160de7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.012676] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7f0ad8-3c46-40d9-875b-cdd74eecd54a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.044994] env[62383]: DEBUG oslo_vmware.api [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 
tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451290, 'name': PowerOnVM_Task, 'duration_secs': 1.143558} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.045286] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 669.045491] env[62383]: INFO nova.compute.manager [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Took 8.65 seconds to spawn the instance on the hypervisor. [ 669.045671] env[62383]: DEBUG nova.compute.manager [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 669.046489] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ead33e0-c34d-4f6a-91aa-d9123ff196de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.181601] env[62383]: DEBUG nova.scheduler.client.report [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.231866] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "refresh_cache-152567ba-f24c-4674-b06e-98c76a3da324" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.233177] env[62383]: DEBUG nova.objects.instance [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lazy-loading 'flavor' on Instance uuid 152567ba-f24c-4674-b06e-98c76a3da324 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 669.331314] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52036bd0-1395-7cab-1f2c-bf32f9e3da62, 'name': SearchDatastore_Task, 'duration_secs': 0.010137} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.331511] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.332058] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 17498cb6-8b16-4a2e-96ae-c594966cee77/17498cb6-8b16-4a2e-96ae-c594966cee77.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 669.332212] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5437e165-8cc3-4d9a-8eb3-058a57d9cedf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.339568] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for the task: (returnval){ [ 669.339568] env[62383]: value = "task-2451291" [ 669.339568] env[62383]: _type = "Task" [ 669.339568] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.348148] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451291, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.570932] env[62383]: INFO nova.compute.manager [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Took 42.82 seconds to build instance. [ 669.687817] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.750s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.691148] env[62383]: DEBUG nova.compute.manager [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 669.692306] env[62383]: DEBUG oslo_concurrency.lockutils [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.514s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.692590] env[62383]: DEBUG nova.objects.instance [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lazy-loading 'resources' on Instance uuid 8e911bad-5408-4588-9865-912ce4457d34 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 669.743391] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df1848f-48f9-4447-9d2a-b62dd3f99c11 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.789136] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 669.789301] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fdc9a79a-935f-4059-8ede-f823b2cef813 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.794951] env[62383]: DEBUG nova.network.neutron [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Successfully updated port: c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 669.798084] env[62383]: DEBUG oslo_vmware.api [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 669.798084] env[62383]: value = "task-2451292" [ 669.798084] env[62383]: _type = "Task" [ 669.798084] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.807633] env[62383]: DEBUG oslo_vmware.api [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451292, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.809817] env[62383]: DEBUG nova.compute.manager [req-8a1bad28-3362-4a23-92e0-aecf59335ead req-a66c71d4-1ccb-457d-8be2-e66ac1405f12 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Received event network-vif-plugged-c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 669.810076] env[62383]: DEBUG oslo_concurrency.lockutils [req-8a1bad28-3362-4a23-92e0-aecf59335ead req-a66c71d4-1ccb-457d-8be2-e66ac1405f12 service nova] Acquiring lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.810375] env[62383]: DEBUG oslo_concurrency.lockutils [req-8a1bad28-3362-4a23-92e0-aecf59335ead req-a66c71d4-1ccb-457d-8be2-e66ac1405f12 service nova] Lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.810620] env[62383]: DEBUG oslo_concurrency.lockutils [req-8a1bad28-3362-4a23-92e0-aecf59335ead req-a66c71d4-1ccb-457d-8be2-e66ac1405f12 service nova] Lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.810894] env[62383]: DEBUG nova.compute.manager [req-8a1bad28-3362-4a23-92e0-aecf59335ead req-a66c71d4-1ccb-457d-8be2-e66ac1405f12 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] No waiting events found dispatching network-vif-plugged-c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 669.811354] env[62383]: WARNING nova.compute.manager [req-8a1bad28-3362-4a23-92e0-aecf59335ead req-a66c71d4-1ccb-457d-8be2-e66ac1405f12 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Received unexpected event network-vif-plugged-c0a30947-ef63-4154-9495-4bb92c6a0578 for instance with vm_state building and task_state spawning. [ 669.850783] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451291, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.075491] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7b9c7ebb-ac60-4352-ba9d-f7befdf0cc54 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.685s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.200174] env[62383]: DEBUG nova.compute.utils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 670.201861] env[62383]: DEBUG nova.compute.manager [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 670.201861] env[62383]: DEBUG nova.network.neutron [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 670.272821] env[62383]: DEBUG nova.policy [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bda8cb7b1005458ca6fc7e5ca6882e6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '603ba5501c904542b6ff0935f620e6da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 670.299261] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquiring lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.299261] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquired lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.299621] env[62383]: DEBUG nova.network.neutron [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 
670.317852] env[62383]: DEBUG oslo_vmware.api [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451292, 'name': PowerOffVM_Task, 'duration_secs': 0.355467} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.318371] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 670.327877] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Reconfiguring VM instance instance-0000001d to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 670.331274] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a71e198-9d42-4eb1-acc3-4f8a2972a64f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.357080] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451291, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548953} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.358602] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 17498cb6-8b16-4a2e-96ae-c594966cee77/17498cb6-8b16-4a2e-96ae-c594966cee77.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 670.358901] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 670.359290] env[62383]: DEBUG oslo_vmware.api [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 670.359290] env[62383]: value = "task-2451293" [ 670.359290] env[62383]: _type = "Task" [ 670.359290] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.359777] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea768439-41c2-42c6-9cc0-eb62199b0af6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.371450] env[62383]: DEBUG oslo_vmware.api [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451293, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.375561] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for the task: (returnval){ [ 670.375561] env[62383]: value = "task-2451294" [ 670.375561] env[62383]: _type = "Task" [ 670.375561] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.388318] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451294, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.580300] env[62383]: DEBUG nova.compute.manager [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 670.711203] env[62383]: DEBUG nova.network.neutron [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Successfully created port: 9648e8c9-183c-4825-9b3d-25732ebd4892 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 670.713370] env[62383]: DEBUG nova.compute.manager [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 670.857617] env[62383]: DEBUG nova.network.neutron [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 670.873808] env[62383]: DEBUG oslo_vmware.api [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451293, 'name': ReconfigVM_Task, 'duration_secs': 0.285195} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.874101] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Reconfigured VM instance instance-0000001d to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 670.874287] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 670.874531] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb93b6fb-e603-4926-9aac-bfe0e4cc82d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.887667] env[62383]: DEBUG oslo_vmware.api [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 670.887667] env[62383]: value = "task-2451297" [ 670.887667] env[62383]: _type = "Task" [ 670.887667] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.892293] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451294, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07048} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.893051] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea09b60-f456-4c3d-b2c8-ff23d220b7da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.899506] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 670.899506] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2fcc9d-e91e-42af-a80d-d8b2266859d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.922542] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66525ece-4721-44e7-b8c8-614865b3606c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.927446] env[62383]: DEBUG oslo_vmware.api [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451297, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.935217] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 17498cb6-8b16-4a2e-96ae-c594966cee77/17498cb6-8b16-4a2e-96ae-c594966cee77.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 670.938013] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8b8f4f0-8374-478f-90da-9ea6ddc2f4bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.988023] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517e266f-b39f-4ecd-8916-21bebdf64cec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.988328] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for the task: (returnval){ [ 670.988328] env[62383]: value = "task-2451298" [ 670.988328] env[62383]: _type = "Task" [ 670.988328] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.995793] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51737d3e-0112-42d0-80d7-7e448f572fb6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.005532] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451298, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.016179] env[62383]: DEBUG nova.compute.provider_tree [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.108780] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.172816] env[62383]: DEBUG nova.network.neutron [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updating instance_info_cache with network_info: [{"id": "c0a30947-ef63-4154-9495-4bb92c6a0578", "address": "fa:16:3e:ca:da:18", "network": {"id": "6eb819a0-f2d3-4b88-a271-99bbdfdb2f52", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-359095630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "89a4cd88e497492da719341b40576b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a30947-ef", "ovs_interfaceid": "c0a30947-ef63-4154-9495-4bb92c6a0578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.402093] env[62383]: DEBUG oslo_vmware.api [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451297, 'name': PowerOnVM_Task, 'duration_secs': 0.411306} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.402530] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 671.402814] env[62383]: DEBUG nova.compute.manager [None req-9271c2bf-ee26-403e-bb2c-6a173400c234 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 671.403724] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b7119e-2510-41f4-8aec-792d0a8cb1f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.497579] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451298, 'name': ReconfigVM_Task, 'duration_secs': 0.49732} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.497906] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 17498cb6-8b16-4a2e-96ae-c594966cee77/17498cb6-8b16-4a2e-96ae-c594966cee77.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 671.498680] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b74002ca-d164-4eed-863a-46489b13f6e7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.505391] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for the task: (returnval){ [ 671.505391] env[62383]: value = "task-2451301" [ 671.505391] env[62383]: _type = "Task" [ 671.505391] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.514608] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451301, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.522686] env[62383]: DEBUG nova.scheduler.client.report [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 671.675809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Releasing lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.676172] env[62383]: DEBUG nova.compute.manager [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Instance network_info: |[{"id": "c0a30947-ef63-4154-9495-4bb92c6a0578", "address": "fa:16:3e:ca:da:18", "network": {"id": "6eb819a0-f2d3-4b88-a271-99bbdfdb2f52", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-359095630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "89a4cd88e497492da719341b40576b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a30947-ef", "ovs_interfaceid": "c0a30947-ef63-4154-9495-4bb92c6a0578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 671.676742] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:da:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '158692b5-b9fb-49e8-9903-e742ffd6c168', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0a30947-ef63-4154-9495-4bb92c6a0578', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 671.684472] env[62383]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Creating folder: Project (89a4cd88e497492da719341b40576b18). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 671.685027] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb2676d8-673a-46ba-b918-a27489e47168 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.695742] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Created folder: Project (89a4cd88e497492da719341b40576b18) in parent group-v496304. [ 671.695945] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Creating folder: Instances. Parent ref: group-v496403. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 671.696209] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e838a64-fcf4-4d7c-8897-cc5f8b8ede98 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.705109] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Created folder: Instances in parent group-v496403. [ 671.705353] env[62383]: DEBUG oslo.service.loopingcall [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 671.705555] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 671.705769] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-369c8b57-b715-49c6-9a7b-cd861990db7b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.720729] env[62383]: DEBUG nova.compute.manager [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 671.721605] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f10470a-2c3a-44e4-963f-185ca486c620 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.725239] env[62383]: DEBUG nova.compute.manager [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 671.730307] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 671.730307] env[62383]: value = "task-2451304" [ 671.730307] env[62383]: _type = "Task" [ 671.730307] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.745681] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451304, 'name': CreateVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.755392] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 671.757170] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 671.757170] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 671.757170] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 671.757170] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 671.757170] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 671.757374] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 671.757374] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 671.757374] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 671.757374] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 671.757374] env[62383]: DEBUG nova.virt.hardware [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 671.758328] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63947830-e900-4366-80ab-d298ef67b7b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.768694] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1342137c-39f6-489d-9037-9047bb28df3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.872899] env[62383]: DEBUG nova.compute.manager [req-eefa0b87-4e43-41e9-9780-da17d0409dd9 req-26f60a55-9aa3-437b-ad9d-945d949174dc service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Received event network-changed-c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 671.873098] env[62383]: DEBUG nova.compute.manager [req-eefa0b87-4e43-41e9-9780-da17d0409dd9 req-26f60a55-9aa3-437b-ad9d-945d949174dc service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Refreshing instance network info cache due to event network-changed-c0a30947-ef63-4154-9495-4bb92c6a0578. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 671.873318] env[62383]: DEBUG oslo_concurrency.lockutils [req-eefa0b87-4e43-41e9-9780-da17d0409dd9 req-26f60a55-9aa3-437b-ad9d-945d949174dc service nova] Acquiring lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.873445] env[62383]: DEBUG oslo_concurrency.lockutils [req-eefa0b87-4e43-41e9-9780-da17d0409dd9 req-26f60a55-9aa3-437b-ad9d-945d949174dc service nova] Acquired lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.873604] env[62383]: DEBUG nova.network.neutron [req-eefa0b87-4e43-41e9-9780-da17d0409dd9 req-26f60a55-9aa3-437b-ad9d-945d949174dc service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Refreshing network info cache for port c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 672.017382] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451301, 'name': Rename_Task, 'duration_secs': 0.304305} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.018436] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 672.018436] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5a0b0a8-ac46-46ce-8785-cf984ec59c60 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.024867] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for the task: (returnval){ [ 672.024867] env[62383]: value = "task-2451305" [ 672.024867] env[62383]: _type = "Task" [ 672.024867] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.028666] env[62383]: DEBUG oslo_concurrency.lockutils [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.336s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.033661] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.432s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.033918] env[62383]: DEBUG nova.objects.instance [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lazy-loading 'resources' on Instance uuid 60535a30-4602-4063-94a4-30ed01266d5b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 672.044697] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451305, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.059226] env[62383]: INFO nova.scheduler.client.report [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Deleted allocations for instance 8e911bad-5408-4588-9865-912ce4457d34 [ 672.242500] env[62383]: INFO nova.compute.manager [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] instance snapshotting [ 672.246675] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67193e5-3c21-4d31-b986-f92a05578544 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.254577] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451304, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.272245] env[62383]: DEBUG nova.network.neutron [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Successfully updated port: 9648e8c9-183c-4825-9b3d-25732ebd4892 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 672.274162] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072a1f0d-5aab-4257-a114-94c1b94343bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.542008] env[62383]: DEBUG oslo_vmware.api [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451305, 'name': PowerOnVM_Task, 'duration_secs': 0.486268} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.543407] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 672.543611] env[62383]: INFO nova.compute.manager [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Took 8.58 seconds to spawn the instance on the hypervisor. [ 672.546020] env[62383]: DEBUG nova.compute.manager [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 672.548971] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cec699b-b11b-468c-85aa-142d4b208c5b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.569792] env[62383]: DEBUG oslo_concurrency.lockutils [None req-417266de-70ca-4a6d-833a-3c8e769a8c86 tempest-ServersAdminNegativeTestJSON-1014568749 tempest-ServersAdminNegativeTestJSON-1014568749-project-member] Lock "8e911bad-5408-4588-9865-912ce4457d34" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.986s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.607014] env[62383]: DEBUG nova.network.neutron [req-eefa0b87-4e43-41e9-9780-da17d0409dd9 req-26f60a55-9aa3-437b-ad9d-945d949174dc service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updated VIF entry in instance network info cache for port c0a30947-ef63-4154-9495-4bb92c6a0578. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 672.607162] env[62383]: DEBUG nova.network.neutron [req-eefa0b87-4e43-41e9-9780-da17d0409dd9 req-26f60a55-9aa3-437b-ad9d-945d949174dc service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updating instance_info_cache with network_info: [{"id": "c0a30947-ef63-4154-9495-4bb92c6a0578", "address": "fa:16:3e:ca:da:18", "network": {"id": "6eb819a0-f2d3-4b88-a271-99bbdfdb2f52", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-359095630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "89a4cd88e497492da719341b40576b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a30947-ef", "ovs_interfaceid": "c0a30947-ef63-4154-9495-4bb92c6a0578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.744386] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451304, 'name': CreateVM_Task, 'duration_secs': 0.573935} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.746805] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 672.747641] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.747804] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.748129] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 672.748376] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9feaaaad-25cf-49b8-8d5a-c74c4e44f6cc {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.753408] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 672.753408] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b3a005-a360-cd92-0165-39add6dceb9a" [ 672.753408] env[62383]: _type = "Task" [ 672.753408] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.763337] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b3a005-a360-cd92-0165-39add6dceb9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.778752] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-a16193af-410e-4bf6-bb06-a97791cf6060" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.778752] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-a16193af-410e-4bf6-bb06-a97791cf6060" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.778752] env[62383]: DEBUG nova.network.neutron [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 672.786485] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 672.787014] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0f478a4c-e573-4638-af78-f4d4832f8402 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.795450] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 672.795450] env[62383]: value = "task-2451306" [ 672.795450] env[62383]: _type = "Task" [ 672.795450] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.805740] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451306, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.047819] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc4e07f-7071-4c64-82d8-3fa4023f0f26 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.056185] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82e1612-37f1-4f7f-a580-42debc99c5aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.067066] env[62383]: INFO nova.compute.manager [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Took 43.11 seconds to build instance. [ 673.091498] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5a75bdb1-6403-4fb0-b9fb-deaee2129c5f tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lock "17498cb6-8b16-4a2e-96ae-c594966cee77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.033s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.092242] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7130fc5c-1e6b-4b5a-8db5-0603a81cee22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.101222] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec84d239-1365-4a73-a1b3-68d4f6a223c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.116720] env[62383]: DEBUG oslo_concurrency.lockutils [req-eefa0b87-4e43-41e9-9780-da17d0409dd9 req-26f60a55-9aa3-437b-ad9d-945d949174dc service nova] Releasing lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.117506] env[62383]: DEBUG nova.compute.provider_tree [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.264778] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b3a005-a360-cd92-0165-39add6dceb9a, 'name': SearchDatastore_Task, 'duration_secs': 0.029648} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.265101] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.265322] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.265556] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.265696] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.265912] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.266144] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4286295a-aaf6-4e1f-a978-9352398ecd50 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.275565] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.275565] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 673.275992] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cf7bec2-1f19-454a-9b0a-a4488fee04c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.281400] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 673.281400] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5273448e-69f2-a11f-4b56-ae2a2049e096" [ 673.281400] env[62383]: _type = "Task" [ 673.281400] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.291579] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5273448e-69f2-a11f-4b56-ae2a2049e096, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.304252] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451306, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.325952] env[62383]: DEBUG nova.network.neutron [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.501629] env[62383]: DEBUG nova.network.neutron [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Updating instance_info_cache with network_info: [{"id": "9648e8c9-183c-4825-9b3d-25732ebd4892", "address": "fa:16:3e:45:ef:9d", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9648e8c9-18", "ovs_interfaceid": "9648e8c9-183c-4825-9b3d-25732ebd4892", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.596670] env[62383]: DEBUG nova.compute.manager [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 673.620887] env[62383]: DEBUG nova.scheduler.client.report [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 673.792472] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5273448e-69f2-a11f-4b56-ae2a2049e096, 'name': SearchDatastore_Task, 'duration_secs': 0.008745} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.793294] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf30443e-1284-415b-8cec-dcbaca86df1a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.802984] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 673.802984] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b14ddb-80a9-889d-b8df-f34210feb2b8" [ 673.802984] env[62383]: _type = "Task" [ 673.802984] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.811179] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451306, 'name': CreateSnapshot_Task, 'duration_secs': 0.889426} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.811850] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 673.812617] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e90ec33-7d99-439f-a2ff-856ac02f5c9e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.819325] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b14ddb-80a9-889d-b8df-f34210feb2b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.004462] env[62383]: DEBUG nova.compute.manager [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Received event network-vif-plugged-9648e8c9-183c-4825-9b3d-25732ebd4892 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 674.004695] env[62383]: DEBUG oslo_concurrency.lockutils [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] Acquiring lock "a16193af-410e-4bf6-bb06-a97791cf6060-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.005042] env[62383]: DEBUG oslo_concurrency.lockutils [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] Lock "a16193af-410e-4bf6-bb06-a97791cf6060-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.005931] env[62383]: DEBUG oslo_concurrency.lockutils [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] Lock "a16193af-410e-4bf6-bb06-a97791cf6060-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.005931] env[62383]: DEBUG nova.compute.manager [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] No waiting events found dispatching network-vif-plugged-9648e8c9-183c-4825-9b3d-25732ebd4892 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 674.005931] env[62383]: WARNING nova.compute.manager [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Received unexpected event network-vif-plugged-9648e8c9-183c-4825-9b3d-25732ebd4892 for instance with vm_state building and task_state spawning. [ 674.005931] env[62383]: DEBUG nova.compute.manager [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Received event network-changed-9648e8c9-183c-4825-9b3d-25732ebd4892 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 674.005931] env[62383]: DEBUG nova.compute.manager [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Refreshing instance network info cache due to event network-changed-9648e8c9-183c-4825-9b3d-25732ebd4892. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 674.006234] env[62383]: DEBUG oslo_concurrency.lockutils [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] Acquiring lock "refresh_cache-a16193af-410e-4bf6-bb06-a97791cf6060" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.007134] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-a16193af-410e-4bf6-bb06-a97791cf6060" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.007422] env[62383]: DEBUG nova.compute.manager [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Instance network_info: |[{"id": "9648e8c9-183c-4825-9b3d-25732ebd4892", "address": "fa:16:3e:45:ef:9d", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9648e8c9-18", "ovs_interfaceid": "9648e8c9-183c-4825-9b3d-25732ebd4892", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 674.007694] env[62383]: DEBUG oslo_concurrency.lockutils [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] Acquired lock "refresh_cache-a16193af-410e-4bf6-bb06-a97791cf6060" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.007861] env[62383]: DEBUG nova.network.neutron [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Refreshing network info cache for port 9648e8c9-183c-4825-9b3d-25732ebd4892 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 674.011051] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:ef:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb971244-43ba-41b4-a6a2-a4558548012c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'9648e8c9-183c-4825-9b3d-25732ebd4892', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 674.019451] env[62383]: DEBUG oslo.service.loopingcall [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.020377] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 674.020640] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aec171bd-6ff8-4829-9346-12cf294c14d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.051319] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 674.051319] env[62383]: value = "task-2451307" [ 674.051319] env[62383]: _type = "Task" [ 674.051319] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.063320] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451307, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.122942] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.128949] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.096s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.129605] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.082s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.132089] env[62383]: INFO nova.compute.claims [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.151416] env[62383]: INFO nova.scheduler.client.report [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Deleted allocations for instance 60535a30-4602-4063-94a4-30ed01266d5b [ 674.319529] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 
tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b14ddb-80a9-889d-b8df-f34210feb2b8, 'name': SearchDatastore_Task, 'duration_secs': 0.034315} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.319848] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.320120] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93/57eaad0a-ca55-4bff-bbd0-6155ecf1cb93.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 674.320389] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-764ebf02-f330-48e9-b258-1d7fbbcd38c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.337555] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 674.339445] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-189d9727-e617-47ce-8e24-4dd94e15d3ec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.343543] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 674.343543] env[62383]: value = "task-2451309" [ 674.343543] env[62383]: _type = "Task" [ 674.343543] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.357853] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 674.357853] env[62383]: value = "task-2451310" [ 674.357853] env[62383]: _type = "Task" [ 674.357853] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.358231] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451309, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.369819] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451310, 'name': CloneVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.569953] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451307, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.658747] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ef02fbf7-cc54-49f7-b656-2d411941dd1e tempest-DeleteServersAdminTestJSON-1856257779 tempest-DeleteServersAdminTestJSON-1856257779-project-member] Lock "60535a30-4602-4063-94a4-30ed01266d5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.950s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.766947] env[62383]: DEBUG nova.network.neutron [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Updated VIF entry in instance network info cache for port 9648e8c9-183c-4825-9b3d-25732ebd4892. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 674.767420] env[62383]: DEBUG nova.network.neutron [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Updating instance_info_cache with network_info: [{"id": "9648e8c9-183c-4825-9b3d-25732ebd4892", "address": "fa:16:3e:45:ef:9d", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9648e8c9-18", "ovs_interfaceid": "9648e8c9-183c-4825-9b3d-25732ebd4892", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.859506] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451309, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.872885] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451310, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.064284] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451307, 'name': CreateVM_Task, 'duration_secs': 0.565119} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.064807] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 675.065228] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.065402] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.065768] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 675.066092] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4a3700e-5996-4318-b772-2f960f62cf74 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.073617] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 675.073617] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c50543-5e80-f70a-8aac-f527909bbb50" [ 675.073617] env[62383]: _type = "Task" [ 675.073617] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.084395] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c50543-5e80-f70a-8aac-f527909bbb50, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.273373] env[62383]: DEBUG oslo_concurrency.lockutils [req-eaf45513-76c5-4dd4-ae4b-225b290bc48f req-40886bed-084e-4c16-b50e-4fc9ff90b97f service nova] Releasing lock "refresh_cache-a16193af-410e-4bf6-bb06-a97791cf6060" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.357640] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.821841} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.358094] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93/57eaad0a-ca55-4bff-bbd0-6155ecf1cb93.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 675.358995] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 675.359964] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-67946ff1-2b42-432c-b528-8ed1bf6aaa93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.376550] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451310, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.378267] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 675.378267] env[62383]: value = "task-2451311" [ 675.378267] env[62383]: _type = "Task" [ 675.378267] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.390461] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451311, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.588476] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c50543-5e80-f70a-8aac-f527909bbb50, 'name': SearchDatastore_Task, 'duration_secs': 0.052253} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.591241] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.591549] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 675.591726] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.591873] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.592061] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 675.592520] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-682050a5-7ac9-4e2a-b2ac-c0f4bdf8e3a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.610973] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 675.611193] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 675.612158] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba997006-a09f-4f74-a08e-572329a65805 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.618510] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 675.618510] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a59bc2-df9a-2ba6-4cca-2d9ea38deed9" [ 675.618510] env[62383]: _type = "Task" [ 675.618510] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.633592] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a59bc2-df9a-2ba6-4cca-2d9ea38deed9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.708117] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceeec585-f02b-4495-ac4f-48e4b2ae9300 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.717374] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51d89fa-b724-41f4-afc8-fbd37261b7a7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.752413] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619788fe-94a9-4a53-8492-f8cf7d1655c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.761224] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0599517-0284-4abc-912d-adb9c4236dff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.775846] env[62383]: DEBUG nova.compute.provider_tree [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.876297] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451310, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.891981] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451311, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072864} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.891981] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 675.891981] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affb8676-9974-41c1-ba72-7eaa5ad88c47 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.913766] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93/57eaad0a-ca55-4bff-bbd0-6155ecf1cb93.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 675.914350] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a630a9ff-e524-4518-a105-174479a12b5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.941708] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 675.941708] env[62383]: value = "task-2451312" [ 675.941708] env[62383]: _type = "Task" [ 675.941708] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.951224] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451312, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.129243] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a59bc2-df9a-2ba6-4cca-2d9ea38deed9, 'name': SearchDatastore_Task, 'duration_secs': 0.064319} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.130072] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbec4bf8-53bc-49ab-97a7-617c155bffe9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.136476] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 676.136476] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52912286-d107-0795-9c26-29b74b906437" [ 676.136476] env[62383]: _type = "Task" [ 676.136476] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.145137] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52912286-d107-0795-9c26-29b74b906437, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.279996] env[62383]: DEBUG nova.scheduler.client.report [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 676.376984] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451310, 'name': CloneVM_Task} progress is 95%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.454673] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451312, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.649436] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52912286-d107-0795-9c26-29b74b906437, 'name': SearchDatastore_Task, 'duration_secs': 0.048025} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.649713] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.649934] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] a16193af-410e-4bf6-bb06-a97791cf6060/a16193af-410e-4bf6-bb06-a97791cf6060.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 676.650230] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a6109b0-f0fa-4d8a-803e-644e7c879fda {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.658762] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 676.658762] env[62383]: value = "task-2451314" [ 676.658762] env[62383]: _type = "Task" [ 676.658762] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.669548] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451314, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.706206] env[62383]: DEBUG nova.compute.manager [req-41802e45-90ce-4869-a1f7-34e249733285 req-9d5506b7-0151-4a14-a808-a003691d770e service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Received event network-changed-0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 676.706206] env[62383]: DEBUG nova.compute.manager [req-41802e45-90ce-4869-a1f7-34e249733285 req-9d5506b7-0151-4a14-a808-a003691d770e service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Refreshing instance network info cache due to event network-changed-0a1882ce-460b-4676-b041-6cc7defcf5f5. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 676.706206] env[62383]: DEBUG oslo_concurrency.lockutils [req-41802e45-90ce-4869-a1f7-34e249733285 req-9d5506b7-0151-4a14-a808-a003691d770e service nova] Acquiring lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.706206] env[62383]: DEBUG oslo_concurrency.lockutils [req-41802e45-90ce-4869-a1f7-34e249733285 req-9d5506b7-0151-4a14-a808-a003691d770e service nova] Acquired lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.706206] env[62383]: DEBUG nova.network.neutron [req-41802e45-90ce-4869-a1f7-34e249733285 req-9d5506b7-0151-4a14-a808-a003691d770e service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Refreshing network info cache for port 0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 676.791021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.791021] env[62383]: DEBUG nova.compute.manager [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 676.792717] env[62383]: DEBUG oslo_concurrency.lockutils [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.257s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.794226] env[62383]: DEBUG nova.objects.instance [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 676.879973] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451310, 'name': CloneVM_Task, 'duration_secs': 2.195297} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.879973] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Created linked-clone VM from snapshot [ 676.879973] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139519dc-3a31-4799-8a47-8601185785ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.886850] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Uploading image 742f6de3-bf7d-4a23-86fc-3507f04723f2 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 676.909212] env[62383]: DEBUG oslo_vmware.rw_handles [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 676.909212] env[62383]: value = "vm-496408" [ 676.909212] env[62383]: _type = "VirtualMachine" [ 676.909212] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 676.909212] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-24205014-fa58-4889-8137-3fcb9fb6a4c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.917627] env[62383]: DEBUG oslo_vmware.rw_handles [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lease: (returnval){ [ 676.917627] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526370cf-c819-7c09-ebc3-dfe80de93dc0" [ 676.917627] env[62383]: _type = "HttpNfcLease" [ 676.917627] env[62383]: } obtained for exporting VM: (result){ [ 676.917627] env[62383]: value = "vm-496408" [ 676.917627] env[62383]: _type = "VirtualMachine" [ 676.917627] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 676.917936] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the lease: (returnval){ [ 676.917936] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526370cf-c819-7c09-ebc3-dfe80de93dc0" [ 676.917936] env[62383]: _type = "HttpNfcLease" [ 676.917936] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 676.925347] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 676.925347] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526370cf-c819-7c09-ebc3-dfe80de93dc0" [ 676.925347] env[62383]: _type = "HttpNfcLease" [ 676.925347] env[62383]: } is initializing. 
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 676.953816] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451312, 'name': ReconfigVM_Task, 'duration_secs': 0.659211} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.953816] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93/57eaad0a-ca55-4bff-bbd0-6155ecf1cb93.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 676.954541] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4afb53c9-2573-474a-8bee-fb44b18ff3a0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.962432] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 676.962432] env[62383]: value = "task-2451316" [ 676.962432] env[62383]: _type = "Task" [ 676.962432] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.972881] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451316, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.173506] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451314, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.201706] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "1e367665-1d4b-4686-ac79-c946423c1762" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.202066] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.294486] env[62383]: DEBUG nova.compute.utils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 677.296713] env[62383]: DEBUG nova.compute.manager [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 677.296713] env[62383]: DEBUG nova.network.neutron [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 677.378050] env[62383]: DEBUG nova.policy [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6324b3a4f5a24752b0bef1b5d79ea2ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fabc88f824a44c57b19a07a605fb89fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 677.427556] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 677.427556] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526370cf-c819-7c09-ebc3-dfe80de93dc0" [ 677.427556] env[62383]: _type = "HttpNfcLease" [ 677.427556] env[62383]: } is ready. 
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 677.427965] env[62383]: DEBUG oslo_vmware.rw_handles [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 677.427965] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526370cf-c819-7c09-ebc3-dfe80de93dc0" [ 677.427965] env[62383]: _type = "HttpNfcLease" [ 677.427965] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 677.428650] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b4dac7-6496-447e-a6c1-3b55f6aa05f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.437816] env[62383]: DEBUG oslo_vmware.rw_handles [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc1fe9-2e3a-b982-27f9-35f044e09a8a/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 677.437816] env[62383]: DEBUG oslo_vmware.rw_handles [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc1fe9-2e3a-b982-27f9-35f044e09a8a/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 677.511259] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451316, 'name': Rename_Task, 'duration_secs': 0.177223} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.511634] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 677.511961] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6301bc6-6f79-4452-b0ae-ae88325ae218 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.521098] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 677.521098] env[62383]: value = "task-2451317" [ 677.521098] env[62383]: _type = "Task" [ 677.521098] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.531648] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451317, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.535943] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-89fb6cd1-c2dd-4e5c-825c-b813595f65b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.548786] env[62383]: DEBUG nova.network.neutron [req-41802e45-90ce-4869-a1f7-34e249733285 req-9d5506b7-0151-4a14-a808-a003691d770e service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Updated VIF entry in instance network info cache for port 0a1882ce-460b-4676-b041-6cc7defcf5f5. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 677.549184] env[62383]: DEBUG nova.network.neutron [req-41802e45-90ce-4869-a1f7-34e249733285 req-9d5506b7-0151-4a14-a808-a003691d770e service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Updating instance_info_cache with network_info: [{"id": "0a1882ce-460b-4676-b041-6cc7defcf5f5", "address": "fa:16:3e:9f:03:f6", "network": {"id": "2e7ea332-ca96-4fb8-9325-de5e215d98a9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1124068406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbf8bc2ab78b4eedaca2c1ee32de8b27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "edd47158-6f4b-44a1-8e82-0411205ad299", "external-id": "nsx-vlan-transportzone-587", "segmentation_id": 587, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a1882ce-46", "ovs_interfaceid": "0a1882ce-460b-4676-b041-6cc7defcf5f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.670403] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451314, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.812686} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.670711] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] a16193af-410e-4bf6-bb06-a97791cf6060/a16193af-410e-4bf6-bb06-a97791cf6060.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 677.670941] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 677.671244] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f81a8348-cce0-4e74-8a2b-6345e2280f14 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.679745] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 677.679745] env[62383]: value = "task-2451318" [ 677.679745] env[62383]: _type = "Task" [ 677.679745] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.691529] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451318, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.799882] env[62383]: DEBUG nova.compute.manager [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 677.804776] env[62383]: DEBUG oslo_concurrency.lockutils [None req-163234b8-9a28-47ec-b214-d9cc76f8b932 tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.806016] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.625s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.806290] env[62383]: DEBUG nova.objects.instance [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lazy-loading 'resources' on Instance uuid 045e5f8f-edd5-425d-bccb-054d90db27d9 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 677.938197] env[62383]: DEBUG nova.network.neutron [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Successfully created port: ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 678.035511] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451317, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.052317] env[62383]: DEBUG oslo_concurrency.lockutils [req-41802e45-90ce-4869-a1f7-34e249733285 req-9d5506b7-0151-4a14-a808-a003691d770e service nova] Releasing lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.198611] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451318, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.192018} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.198967] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 678.200572] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0e5cec-0057-4e49-a0c5-871fbb29d3b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.235813] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] a16193af-410e-4bf6-bb06-a97791cf6060/a16193af-410e-4bf6-bb06-a97791cf6060.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 678.236533] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dca2b3b9-65ab-4ac4-9f16-afe72f87bea7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.263893] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 678.263893] env[62383]: value = "task-2451319" [ 678.263893] env[62383]: _type = "Task" [ 678.263893] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.278087] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451319, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.542278] env[62383]: DEBUG oslo_vmware.api [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451317, 'name': PowerOnVM_Task, 'duration_secs': 0.577857} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.544536] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.544536] env[62383]: INFO nova.compute.manager [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Took 9.57 seconds to spawn the instance on the hypervisor. 
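The spawn path traced above for instance 57eaad0a (copy the cached image VMDK, extend the root disk, ReconfigVM_Task to attach it, Rename_Task, then PowerOnVM_Task) is driven by the same wait-for-task loop at every step: each vSphere call returns a task object and the caller polls it, which is what the repeated "_poll_task ... progress is N%" and "completed successfully ... duration_secs" entries record. Below is a minimal sketch of that polling pattern; get_task_info and the state strings are hypothetical stand-ins for the vSphere task object, not the oslo.vmware implementation itself.

import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds or fails, mirroring the _poll_task entries above."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()          # hypothetical: returns e.g. {'state': 'running', 'progress': 45}
        state = info.get('state')
        if state == 'success':
            return info                 # caller then logs "completed successfully" with duration_secs
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        print("progress is %s%%" % info.get('progress', 0))   # the repeated DEBUG progress lines
        time.sleep(poll_interval)       # cf. the task_poll_interval option in the [vmware] config group
    raise TimeoutError("task did not complete within %.0fs" % timeout)
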
[ 678.544536] env[62383]: DEBUG nova.compute.manager [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.544758] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c95d1f-9d75-4b57-92c8-795eeb8a0aa8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.775162] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451319, 'name': ReconfigVM_Task, 'duration_secs': 0.47528} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.775734] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Reconfigured VM instance instance-00000026 to attach disk [datastore1] a16193af-410e-4bf6-bb06-a97791cf6060/a16193af-410e-4bf6-bb06-a97791cf6060.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 678.777163] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0cbccb2c-d0d6-4994-9809-f8464bfcbd69 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.793751] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 678.793751] env[62383]: value = "task-2451321" [ 678.793751] env[62383]: _type = "Task" [ 678.793751] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.809272] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451321, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.815777] env[62383]: DEBUG nova.compute.manager [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 678.855526] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 678.855526] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.855526] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 678.855526] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 678.856123] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 678.856483] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 678.856821] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 678.857121] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 678.857441] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Got 1 possible 
topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 678.857920] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 678.858939] env[62383]: DEBUG nova.virt.hardware [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 678.860710] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d530ce05-13dd-4e7f-8617-f1437dd4e133 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.874511] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d388978d-49ad-401b-8c1b-2d20ff87d3f5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.905022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Acquiring lock "14bb9b79-d224-4a64-861e-30dd919c5741" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.905022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Lock "14bb9b79-d224-4a64-861e-30dd919c5741" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.023251] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8784744b-395b-4e59-ace4-6d10450f025f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.033671] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43bdb833-8ff2-4ecd-828f-44c36249be8e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.076316] env[62383]: INFO nova.compute.manager [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Took 45.01 seconds to build instance. 
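The nova.virt.hardware trace above (flavor m1.nano with 1 vCPU and no flavor or image topology limits, so the 65536 defaults apply) reduces to a single candidate topology, cores=1, sockets=1, threads=1. A rough sketch of that enumeration follows, under the simplifying assumption that every (sockets, cores, threads) factorisation of the vCPU count within the limits is a candidate; Nova's hardware.py applies further preference and ordering rules on top of this.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals vcpus,
    # capped by the per-dimension limits; 0 in the flavor/image means "unset",
    # which is why the 65536 defaults show up in the log.
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)], matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"
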
[ 679.078382] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c903b4-f18a-44fd-a9a4-35fb4c88dec9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.087494] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346206ac-f288-41dc-9f93-b8332af37057 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.106745] env[62383]: DEBUG nova.compute.provider_tree [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 679.305779] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451321, 'name': Rename_Task, 'duration_secs': 0.202235} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.306100] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 679.306433] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95552187-0450-42f2-96dc-d098d9019c4c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.314723] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 679.314723] env[62383]: value = "task-2451322" [ 679.314723] env[62383]: _type = "Task" [ 679.314723] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.325290] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451322, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.408465] env[62383]: DEBUG nova.compute.utils [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 679.579188] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f8e7730c-2b05-4b31-bdac-099f20818ce2 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.879s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.610763] env[62383]: DEBUG nova.scheduler.client.report [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 679.827051] env[62383]: DEBUG oslo_vmware.api [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451322, 'name': PowerOnVM_Task, 'duration_secs': 0.502128} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.827333] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 679.827557] env[62383]: INFO nova.compute.manager [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Took 8.10 seconds to spawn the instance on the hypervisor. 
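The inventory reported to Placement above fixes how much the scheduler can hand out per resource class: the effective capacity is (total - reserved) * allocation_ratio, while min_unit, max_unit and step_size bound each individual allocation. Plugging in the logged values:

# Capacity derived from the inventory entry logged for provider 60615f54-0557-436e-a486-87505bffb4c7.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print("%s: schedulable capacity = %g" % (rc, capacity))
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

So the 48 host VCPUs are offered as 192 schedulable VCPUs at the 4.0 allocation ratio, while memory and disk are not overcommitted.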
[ 679.827748] env[62383]: DEBUG nova.compute.manager [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 679.828592] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3e5195-be5a-442a-bc7c-bd2da72b9c66 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.912765] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Lock "14bb9b79-d224-4a64-861e-30dd919c5741" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.139513] env[62383]: DEBUG nova.compute.manager [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 680.149274] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.336s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.153421] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.905s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.158857] env[62383]: INFO nova.compute.claims [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 680.169857] env[62383]: DEBUG nova.network.neutron [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Successfully updated port: ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 680.263297] env[62383]: INFO nova.scheduler.client.report [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Deleted allocations for instance 045e5f8f-edd5-425d-bccb-054d90db27d9 [ 680.355853] env[62383]: INFO nova.compute.manager [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Took 45.31 seconds to build instance. 
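Nearly every step above is serialised on named semaphores, which is where the "Acquiring lock ... / acquired ... waited Ns / 'released' ... held Ns" lines come from (oslo_concurrency.lockutils; the 84.836s hold above covers the whole _locked_do_build_and_run_instance for a16193af). A minimal sketch of that usage pattern, assuming oslo.concurrency is installed; instance_claim and update_usage here are hypothetical stand-ins, not Nova's actual methods.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    # The body runs while holding the 'compute_resources' semaphore; concurrent
    # callers block here, and the wait/hold durations are what the DEBUG lines report.
    print("claiming %s VCPU / %s MB for %s" % (vcpus, memory_mb, instance_uuid))

# Equivalent context-manager form:
def update_usage(instance_uuid):
    with lockutils.lock('compute_resources'):
        print("updating usage for %s" % instance_uuid)
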
[ 680.682355] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.683503] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "refresh_cache-0c01a974-2318-461b-965f-ba4932e3bea1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.683754] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "refresh_cache-0c01a974-2318-461b-965f-ba4932e3bea1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.686730] env[62383]: DEBUG nova.network.neutron [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 680.773153] env[62383]: DEBUG oslo_concurrency.lockutils [None req-900ad976-4aa3-4e6f-b062-deb14533b18f tempest-ServerShowV257Test-561862572 tempest-ServerShowV257Test-561862572-project-member] Lock "045e5f8f-edd5-425d-bccb-054d90db27d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.037s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.859751] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1525079e-bbb2-483b-9cdb-7bf56cf47df3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a16193af-410e-4bf6-bb06-a97791cf6060" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.836s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.015667] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Acquiring lock "14bb9b79-d224-4a64-861e-30dd919c5741" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.015957] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Lock "14bb9b79-d224-4a64-861e-30dd919c5741" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.016208] env[62383]: INFO nova.compute.manager [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 
14bb9b79-d224-4a64-861e-30dd919c5741] Attaching volume 665dea38-a0be-4a97-be00-22d6a9018fe3 to /dev/sdb [ 681.071023] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72246258-0a2f-4da8-aafe-b569c33f202b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.083933] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc79d69-e38c-4d3c-85d5-ec7a14c01509 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.106241] env[62383]: DEBUG nova.virt.block_device [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Updating existing volume attachment record: 91f37e19-ab1c-40d4-a482-447b338a7fbd {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 681.310606] env[62383]: DEBUG nova.network.neutron [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.363622] env[62383]: DEBUG nova.compute.manager [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 681.424191] env[62383]: DEBUG nova.compute.manager [req-dc2f19af-2473-42eb-9610-72a7ffede46a req-bdcf9708-452f-4f01-b0eb-b3de2368c873 service nova] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Received event network-vif-plugged-ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 681.424191] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc2f19af-2473-42eb-9610-72a7ffede46a req-bdcf9708-452f-4f01-b0eb-b3de2368c873 service nova] Acquiring lock "0c01a974-2318-461b-965f-ba4932e3bea1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.424191] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc2f19af-2473-42eb-9610-72a7ffede46a req-bdcf9708-452f-4f01-b0eb-b3de2368c873 service nova] Lock "0c01a974-2318-461b-965f-ba4932e3bea1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.424398] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc2f19af-2473-42eb-9610-72a7ffede46a req-bdcf9708-452f-4f01-b0eb-b3de2368c873 service nova] Lock "0c01a974-2318-461b-965f-ba4932e3bea1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.424878] env[62383]: DEBUG nova.compute.manager [req-dc2f19af-2473-42eb-9610-72a7ffede46a req-bdcf9708-452f-4f01-b0eb-b3de2368c873 service nova] [instance: 
0c01a974-2318-461b-965f-ba4932e3bea1] No waiting events found dispatching network-vif-plugged-ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 681.424878] env[62383]: WARNING nova.compute.manager [req-dc2f19af-2473-42eb-9610-72a7ffede46a req-bdcf9708-452f-4f01-b0eb-b3de2368c873 service nova] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Received unexpected event network-vif-plugged-ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7 for instance with vm_state building and task_state spawning. [ 681.733833] env[62383]: DEBUG nova.network.neutron [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Updating instance_info_cache with network_info: [{"id": "ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7", "address": "fa:16:3e:e9:91:6f", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea3f5faa-5f", "ovs_interfaceid": "ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.845549] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc258955-fb9e-46fc-b024-a59fb652ee55 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.856167] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0126a1-8ae8-4ff9-945b-ea8d4a66de5f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.902222] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07e8402-c99f-4dbe-b269-8d7010482ede {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.915437] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd20ea3-0712-47c9-be4d-f2e1da11cd48 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.934888] env[62383]: DEBUG nova.compute.provider_tree [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 681.935310] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.237848] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "refresh_cache-0c01a974-2318-461b-965f-ba4932e3bea1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.239067] env[62383]: DEBUG nova.compute.manager [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Instance network_info: |[{"id": "ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7", "address": "fa:16:3e:e9:91:6f", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea3f5faa-5f", "ovs_interfaceid": "ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 682.239576] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:91:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5f60c972-a72d-4c5f-a250-faadfd6eafbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.251024] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating folder: Project 
(fabc88f824a44c57b19a07a605fb89fd). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.251024] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2409da6-8bbb-45d6-aeee-c20487aefffc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.262920] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Created folder: Project (fabc88f824a44c57b19a07a605fb89fd) in parent group-v496304. [ 682.263211] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating folder: Instances. Parent ref: group-v496413. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.263485] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78a8a896-1602-4c4a-95b9-d71bd2804689 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.279636] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Created folder: Instances in parent group-v496413. [ 682.279962] env[62383]: DEBUG oslo.service.loopingcall [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 682.280196] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 682.280515] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-200fcc2d-690b-4d44-942f-db6457f70375 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.303416] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.303416] env[62383]: value = "task-2451329" [ 682.303416] env[62383]: _type = "Task" [ 682.303416] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.312943] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451329, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.464635] env[62383]: ERROR nova.scheduler.client.report [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [req-5d5d23e8-c6fc-455e-9cfb-01f7ea2cc9e0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5d5d23e8-c6fc-455e-9cfb-01f7ea2cc9e0"}]} [ 682.495613] env[62383]: DEBUG nova.scheduler.client.report [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 682.533691] env[62383]: DEBUG nova.scheduler.client.report [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 682.533691] env[62383]: DEBUG nova.compute.provider_tree [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 682.556175] env[62383]: DEBUG nova.scheduler.client.report [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 682.580588] env[62383]: DEBUG nova.scheduler.client.report [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] 
Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 682.823175] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451329, 'name': CreateVM_Task, 'duration_secs': 0.422751} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.824096] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 682.828785] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 682.829026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.829413] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 682.830266] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-292f8eea-2b1b-4ad4-a550-e30807e61f7d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.842188] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 682.842188] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525cf42c-e9c4-0686-1db6-fe22f88d435e" [ 682.842188] env[62383]: _type = "Task" [ 682.842188] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.855632] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525cf42c-e9c4-0686-1db6-fe22f88d435e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.165333] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358ec975-71d0-4fff-b0f7-f9105207a3a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.174365] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861bc70d-f229-4700-b59a-578a65f863fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.210381] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7102718b-87ff-400e-bdae-3f7a02c11d5b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.222245] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361b1ad6-1d57-4d1b-9436-e7f97ef9a2fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.240229] env[62383]: DEBUG nova.compute.provider_tree [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 683.353748] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525cf42c-e9c4-0686-1db6-fe22f88d435e, 'name': SearchDatastore_Task, 'duration_secs': 0.025771} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.354164] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 683.354309] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.354556] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.354691] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.354863] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 683.355145] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de264ebd-52bf-4c33-aad5-52fd8556f010 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.367280] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 683.367484] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 683.368257] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe2d949c-043b-4bc3-85d8-b9663c6f2365 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.374832] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 683.374832] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52645214-997a-91e9-fd14-01cfe4a3c388" [ 683.374832] env[62383]: _type = "Task" [ 683.374832] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.384292] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52645214-997a-91e9-fd14-01cfe4a3c388, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.709153] env[62383]: DEBUG nova.compute.manager [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Received event network-changed-ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 683.709354] env[62383]: DEBUG nova.compute.manager [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Refreshing instance network info cache due to event network-changed-ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 683.709572] env[62383]: DEBUG oslo_concurrency.lockutils [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] Acquiring lock "refresh_cache-0c01a974-2318-461b-965f-ba4932e3bea1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 683.709774] env[62383]: DEBUG oslo_concurrency.lockutils [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] Acquired lock "refresh_cache-0c01a974-2318-461b-965f-ba4932e3bea1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.709902] env[62383]: DEBUG nova.network.neutron [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Refreshing network info cache for port ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 683.780425] env[62383]: DEBUG nova.scheduler.client.report [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 683.780723] env[62383]: DEBUG nova.compute.provider_tree [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 61 to 62 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 683.780934] env[62383]: DEBUG nova.compute.provider_tree [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 683.886812] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52645214-997a-91e9-fd14-01cfe4a3c388, 'name': SearchDatastore_Task, 'duration_secs': 0.025845} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.889165] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f7ddd36-ff5e-4f2b-a63d-e6a40450f836 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.894866] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 683.894866] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]521d266e-4dfd-956f-950d-dd62c1cb1977" [ 683.894866] env[62383]: _type = "Task" [ 683.894866] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.905142] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521d266e-4dfd-956f-950d-dd62c1cb1977, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.287367] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.135s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.289786] env[62383]: DEBUG nova.compute.manager [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 684.292518] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 32.917s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.376145] env[62383]: INFO nova.compute.manager [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Rebuilding instance [ 684.408646] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521d266e-4dfd-956f-950d-dd62c1cb1977, 'name': SearchDatastore_Task, 'duration_secs': 0.03714} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.409538] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 684.409854] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 0c01a974-2318-461b-965f-ba4932e3bea1/0c01a974-2318-461b-965f-ba4932e3bea1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 684.410177] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-debe6213-d3a3-4963-b318-0f9043c9d73c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.425413] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 684.425413] env[62383]: value = "task-2451332" [ 684.425413] env[62383]: _type = "Task" [ 684.425413] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.436427] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451332, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.442895] env[62383]: DEBUG nova.compute.manager [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 684.444909] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f57c9ff-d33a-48ad-b8f4-d0e547df30da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.505325] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquiring lock "17498cb6-8b16-4a2e-96ae-c594966cee77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.505572] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lock "17498cb6-8b16-4a2e-96ae-c594966cee77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.505779] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquiring lock "17498cb6-8b16-4a2e-96ae-c594966cee77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.506068] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lock "17498cb6-8b16-4a2e-96ae-c594966cee77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.506554] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lock "17498cb6-8b16-4a2e-96ae-c594966cee77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.510389] env[62383]: INFO nova.compute.manager [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Terminating instance [ 684.538334] env[62383]: DEBUG nova.network.neutron 
[req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Updated VIF entry in instance network info cache for port ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 684.538728] env[62383]: DEBUG nova.network.neutron [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Updating instance_info_cache with network_info: [{"id": "ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7", "address": "fa:16:3e:e9:91:6f", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea3f5faa-5f", "ovs_interfaceid": "ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.795271] env[62383]: DEBUG nova.compute.utils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 684.809598] env[62383]: DEBUG nova.compute.manager [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 684.810080] env[62383]: DEBUG nova.network.neutron [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 684.865175] env[62383]: DEBUG nova.policy [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02daed55abc149a2a3f4502400b674eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a657c912fef04c5ca8c0b5d96a8a3064', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 684.940527] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451332, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.014428] env[62383]: DEBUG nova.compute.manager [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 685.014732] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 685.015706] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cf9927-515b-4d46-9fc5-1f7936c3e1ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.026538] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 685.027367] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5bb3765-7c86-4698-9121-b4629ed79e09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.036627] env[62383]: DEBUG oslo_vmware.api [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for the task: (returnval){ [ 685.036627] env[62383]: value = "task-2451334" [ 685.036627] env[62383]: _type = "Task" [ 685.036627] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.042159] env[62383]: DEBUG oslo_concurrency.lockutils [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] Releasing lock "refresh_cache-0c01a974-2318-461b-965f-ba4932e3bea1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.042997] env[62383]: DEBUG nova.compute.manager [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Received event network-changed-0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 685.042997] env[62383]: DEBUG nova.compute.manager [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Refreshing instance network info cache due to event network-changed-0a1882ce-460b-4676-b041-6cc7defcf5f5. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 685.044467] env[62383]: DEBUG oslo_concurrency.lockutils [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] Acquiring lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.044467] env[62383]: DEBUG oslo_concurrency.lockutils [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] Acquired lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.044467] env[62383]: DEBUG nova.network.neutron [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Refreshing network info cache for port 0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 685.057259] env[62383]: DEBUG oslo_vmware.api [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451334, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.310536] env[62383]: DEBUG nova.compute.manager [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 685.354755] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1a740010-ddd0-4df6-8ae6-02f1ed50137f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.355060] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2c93bdf1-aaf4-4e40-898a-634dc00d05e6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.355238] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance da16da02-25ab-46f9-9070-9fdde0b3a75e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.355445] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8a165d96-f503-4bc5-bff4-e6a85201e137 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.355664] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 14bb9b79-d224-4a64-861e-30dd919c5741 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.355967] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 67d41910-54e1-48f1-b0d3-f34a62595ef2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.356074] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 80821717-f961-49c7-8b79-c152edfdfb94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.356274] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a8d56b8e-fa11-4844-ab65-a2e5d24b1e07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.356492] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 685.356665] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance f28beb17-8455-49d3-8be0-7636b9abe4e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.356796] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2337e9a2-736c-4d58-ac2e-04c8ad813be4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.356914] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance e51a0dd7-b5da-44cb-9cd8-62932aec3ad5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.357354] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 152567ba-f24c-4674-b06e-98c76a3da324 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.357671] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance c2fee51e-3cc9-421c-bfe5-b324a5b14197 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.358169] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 330b5e35-3292-4df7-b288-547b158e671a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 685.358259] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 6b5daa17-ad4a-4b30-a1fe-083a1a238667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.358422] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 872ac212-9f29-426d-94c7-e1bf73aebd94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.359268] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 9c2c55a9-5b24-4d52-8d6b-666609349a3a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.359458] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.359718] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 17498cb6-8b16-4a2e-96ae-c594966cee77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.359880] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.360073] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a16193af-410e-4bf6-bb06-a97791cf6060 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.361684] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 0c01a974-2318-461b-965f-ba4932e3bea1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.361684] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 685.396204] env[62383]: DEBUG nova.network.neutron [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Successfully created port: 241eb943-d5b6-4224-b2fb-c12596e3b206 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 685.441361] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451332, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.696247} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.441361] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 0c01a974-2318-461b-965f-ba4932e3bea1/0c01a974-2318-461b-965f-ba4932e3bea1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 685.441361] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 685.441361] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42ddc85e-aedb-4970-880a-77dd98d7b465 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.453320] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 685.453320] env[62383]: value = "task-2451335" [ 685.453320] env[62383]: _type = "Task" [ 685.453320] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.466913] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 685.467306] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451335, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.467571] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3f48b38-1dde-4ba9-8869-1ad294ebf970 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.475815] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 685.475815] env[62383]: value = "task-2451336" [ 685.475815] env[62383]: _type = "Task" [ 685.475815] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.485864] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451336, 'name': PowerOffVM_Task} progress is 0%. 
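The "Extending root virtual disk to 1048576" figure above matches the flavor's 1 GiB root disk expressed in KiB (1024 * 1024); a quick arithmetic check:

from oslo_utils import units

root_gb = 1                              # flavor root_gb for m1.nano
requested_size_kib = root_gb * units.Mi  # 1 GiB expressed in KiB
assert requested_size_kib == 1048576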
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.555197] env[62383]: DEBUG oslo_vmware.api [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451334, 'name': PowerOffVM_Task, 'duration_secs': 0.259802} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.555703] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 685.555703] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 685.556316] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-663ea692-9602-498a-9ba9-1f2d8eb7d73c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.637590] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 685.639619] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 685.639619] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Deleting the datastore file [datastore1] 17498cb6-8b16-4a2e-96ae-c594966cee77 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.639619] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef90994d-a8c2-487f-b3e0-7f60cfc21410 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.652500] env[62383]: DEBUG oslo_vmware.api [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for the task: (returnval){ [ 685.652500] env[62383]: value = "task-2451338" [ 685.652500] env[62383]: _type = "Task" [ 685.652500] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.668711] env[62383]: DEBUG oslo_vmware.api [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451338, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.687040] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Volume attach. Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 685.687343] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496412', 'volume_id': '665dea38-a0be-4a97-be00-22d6a9018fe3', 'name': 'volume-665dea38-a0be-4a97-be00-22d6a9018fe3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14bb9b79-d224-4a64-861e-30dd919c5741', 'attached_at': '', 'detached_at': '', 'volume_id': '665dea38-a0be-4a97-be00-22d6a9018fe3', 'serial': '665dea38-a0be-4a97-be00-22d6a9018fe3'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 685.688236] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4f7063-9bc0-42ed-9585-4c133e768de2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.706500] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4c9c93-2b38-4d8c-a249-a66ebe0ae85d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.736836] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] volume-665dea38-a0be-4a97-be00-22d6a9018fe3/volume-665dea38-a0be-4a97-be00-22d6a9018fe3.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 685.739355] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f50180a4-b240-44ad-a84d-5b2b726b14c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.761899] env[62383]: DEBUG oslo_vmware.api [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Waiting for the task: (returnval){ [ 685.761899] env[62383]: value = "task-2451339" [ 685.761899] env[62383]: _type = "Task" [ 685.761899] env[62383]: } to complete. 
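Most vCenter interactions in this trace follow the same invoke-then-poll pattern: a *_Task method is invoked through the session, then oslo_vmware polls it, logging "progress is N%" until it completes. A minimal sketch of that pattern, assuming an already-reachable vCenter; the endpoint and credentials below are placeholders:

from oslo_vmware import api

# Placeholder endpoint and credentials; the poll interval mirrors the
# frequent "progress is N%" records above.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

def power_off_vm(session, vm_ref):
    # Invoke VirtualMachine.PowerOffVM_Task, then wait for the task -- the
    # same sequence logged for task-2451336 above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)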
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.774697] env[62383]: DEBUG oslo_vmware.api [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Task: {'id': task-2451339, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.870403] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 685.967533] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451335, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080891} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.967880] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 685.969696] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c779dbc1-59e6-44bb-a7e5-86d40d1e6d0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.003276] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 0c01a974-2318-461b-965f-ba4932e3bea1/0c01a974-2318-461b-965f-ba4932e3bea1.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.006963] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-370fe09f-4469-4524-8cd6-d2bab9c71231 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.022342] env[62383]: DEBUG nova.network.neutron [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Updated VIF entry in instance network info cache for port 0a1882ce-460b-4676-b041-6cc7defcf5f5. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 686.022755] env[62383]: DEBUG nova.network.neutron [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Updating instance_info_cache with network_info: [{"id": "0a1882ce-460b-4676-b041-6cc7defcf5f5", "address": "fa:16:3e:9f:03:f6", "network": {"id": "2e7ea332-ca96-4fb8-9325-de5e215d98a9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1124068406-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbf8bc2ab78b4eedaca2c1ee32de8b27", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "edd47158-6f4b-44a1-8e82-0411205ad299", "external-id": "nsx-vlan-transportzone-587", "segmentation_id": 587, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a1882ce-46", "ovs_interfaceid": "0a1882ce-460b-4676-b041-6cc7defcf5f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.037057] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451336, 'name': PowerOffVM_Task, 'duration_secs': 0.227021} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.039398] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 686.039705] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 686.040117] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 686.040117] env[62383]: value = "task-2451340" [ 686.040117] env[62383]: _type = "Task" [ 686.040117] env[62383]: } to complete. 
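The instance_info_cache entries above are lists of VIF dictionaries with nested network/subnet/IP structures. A small, illustrative helper for pulling the fixed addresses out of one such entry (not a Nova API):

def fixed_ips(network_info):
    """Return the fixed addresses from a cached network_info list."""
    addresses = []
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            addresses.extend(ip['address'] for ip in subnet['ips']
                             if ip['type'] == 'fixed')
    return addresses

# For the cache entry logged above for instance 17498cb6-..., this yields
# ['192.168.128.10'].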
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.041532] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4695093c-22d4-4631-8085-9db36c903fa0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.062173] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 686.065666] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-736c0f06-9336-42e8-99d8-566a154b9077 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.068173] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451340, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.143581] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 686.143654] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 686.143865] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleting the datastore file [datastore1] a16193af-410e-4bf6-bb06-a97791cf6060 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 686.144892] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3aa9996-2d80-4eb9-bca2-ea457004641f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.158825] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 686.158825] env[62383]: value = "task-2451342" [ 686.158825] env[62383]: _type = "Task" [ 686.158825] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.170916] env[62383]: DEBUG oslo_vmware.api [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Task: {'id': task-2451338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.309577} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.170916] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 686.170916] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 686.170916] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 686.170916] env[62383]: INFO nova.compute.manager [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Took 1.16 seconds to destroy the instance on the hypervisor. [ 686.171182] env[62383]: DEBUG oslo.service.loopingcall [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 686.171182] env[62383]: DEBUG nova.compute.manager [-] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 686.171245] env[62383]: DEBUG nova.network.neutron [-] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 686.179380] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451342, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.278242] env[62383]: DEBUG oslo_vmware.api [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Task: {'id': task-2451339, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.329525] env[62383]: DEBUG nova.compute.manager [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Start spawning the instance on the hypervisor. 
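The teardown of instance 17498cb6-... above follows a fixed order: power off, unregister the VM, delete its datastore contents, then deallocate the Neutron resources. A condensed, illustrative sequence under the same session assumption as earlier; argument handling is simplified and this is not Nova's vmops code:

def destroy_instance(session, vm_ref, ds_path, dc_ref, network_api,
                     context, instance):
    # 1. PowerOffVM_Task ("Powered off the VM")
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
    # 2. UnregisterVM ("Unregistered the VM")
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # 3. DeleteDatastoreFile_Task ("Deleting the datastore file [datastore1] ...")
    file_manager = session.vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                           file_manager, name=ds_path, datacenter=dc_ref))
    # 4. Release network resources ("Deallocating network for instance")
    network_api.deallocate_for_instance(context, instance)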
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 686.374031] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 686.388987] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 686.389529] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 686.389816] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 686.390144] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 686.390413] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 686.393042] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 686.393042] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 686.393042] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 686.393042] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 686.393042] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 686.393276] env[62383]: DEBUG nova.virt.hardware [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 686.393276] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e03948-da15-4d35-94e0-a3f47bf1c4f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.403610] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd8e740-6a59-406d-bdd3-3d21786332e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.530383] env[62383]: DEBUG oslo_concurrency.lockutils [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] Releasing lock "refresh_cache-17498cb6-8b16-4a2e-96ae-c594966cee77" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.530383] env[62383]: DEBUG nova.compute.manager [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Received event network-changed-c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 686.530576] env[62383]: DEBUG nova.compute.manager [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Refreshing instance network info cache due to event network-changed-c0a30947-ef63-4154-9495-4bb92c6a0578. 
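The hardware records above show how a topology is chosen for the 1-vCPU m1.nano flavor: with no flavor or image limits set ("Flavor limits 0:0:0"), the default maxima of 65536 sockets/cores/threads apply and the only factorisation of one vCPU is 1:1:1. A rough, self-contained illustration of that enumeration (not Nova's _get_possible_cpu_topologies):

import collections

VirtCPUTopology = collections.namedtuple('VirtCPUTopology',
                                         'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Enumerate every sockets*cores*threads factorisation of the vCPU count
    # that respects the limits, as in "Got 1 possible topologies" above.
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]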
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 686.530771] env[62383]: DEBUG oslo_concurrency.lockutils [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] Acquiring lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.530941] env[62383]: DEBUG oslo_concurrency.lockutils [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] Acquired lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.531159] env[62383]: DEBUG nova.network.neutron [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Refreshing network info cache for port c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 686.559068] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451340, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.672191] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451342, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197018} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.672445] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 686.672624] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 686.672794] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 686.752861] env[62383]: DEBUG oslo_vmware.rw_handles [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc1fe9-2e3a-b982-27f9-35f044e09a8a/disk-0.vmdk. 
{{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 686.753930] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352cafcd-9d23-4d5c-ba73-17eb20b30878 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.762284] env[62383]: DEBUG oslo_vmware.rw_handles [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc1fe9-2e3a-b982-27f9-35f044e09a8a/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 686.762563] env[62383]: ERROR oslo_vmware.rw_handles [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc1fe9-2e3a-b982-27f9-35f044e09a8a/disk-0.vmdk due to incomplete transfer. [ 686.762918] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cea7a513-8f17-4d45-b65f-2f7fb4ad8fb7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.776534] env[62383]: DEBUG oslo_vmware.api [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Task: {'id': task-2451339, 'name': ReconfigVM_Task, 'duration_secs': 0.688114} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.778733] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Reconfigured VM instance instance-0000000d to attach disk [datastore2] volume-665dea38-a0be-4a97-be00-22d6a9018fe3/volume-665dea38-a0be-4a97-be00-22d6a9018fe3.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 686.783104] env[62383]: DEBUG oslo_vmware.rw_handles [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fc1fe9-2e3a-b982-27f9-35f044e09a8a/disk-0.vmdk. 
{{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 686.783362] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Uploaded image 742f6de3-bf7d-4a23-86fc-3507f04723f2 to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 686.785728] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 686.786045] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed982188-82b4-4d30-8926-f2ab551f7dee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.801672] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-437a62bd-7e29-4577-afc5-e916f59f2b1f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.808127] env[62383]: DEBUG nova.compute.manager [req-6cca32c2-151b-4f30-8e07-20efbba7c131 req-2a7f51b5-c58a-4435-92f4-0f7c6c119a14 service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Received event network-vif-deleted-0a1882ce-460b-4676-b041-6cc7defcf5f5 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 686.808492] env[62383]: INFO nova.compute.manager [req-6cca32c2-151b-4f30-8e07-20efbba7c131 req-2a7f51b5-c58a-4435-92f4-0f7c6c119a14 service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Neutron deleted interface 0a1882ce-460b-4676-b041-6cc7defcf5f5; detaching it from the instance and deleting it from the info cache [ 686.808921] env[62383]: DEBUG nova.network.neutron [req-6cca32c2-151b-4f30-8e07-20efbba7c131 req-2a7f51b5-c58a-4435-92f4-0f7c6c119a14 service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.814714] env[62383]: DEBUG oslo_vmware.api [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Waiting for the task: (returnval){ [ 686.814714] env[62383]: value = "task-2451345" [ 686.814714] env[62383]: _type = "Task" [ 686.814714] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.815043] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 686.815043] env[62383]: value = "task-2451344" [ 686.815043] env[62383]: _type = "Task" [ 686.815043] env[62383]: } to complete. 
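The rw_handles records above show the lease teardown rule for the image upload: the lease state is read first, and because the transfer did not finish, the lease is aborted rather than completed. An illustrative condensation of that decision using oslo_vmware calls (not the library's own _release_lease):

from oslo_vmware import vim_util

def release_lease(session, lease, transfer_complete):
    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, lease, 'state')
    if state != 'ready':
        return
    if transfer_complete:
        session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
    else:
        # "Aborting lease ... due to incomplete transfer."
        session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)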
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.832800] env[62383]: DEBUG oslo_vmware.api [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Task: {'id': task-2451345, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.838179] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451344, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.840879] env[62383]: DEBUG nova.compute.manager [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 686.842392] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47deeefd-3575-4eb7-85e5-78c558d0e9d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.878020] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 67f05a2b-f323-4e4a-ac13-7f4745593be0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.058341] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451340, 'name': ReconfigVM_Task, 'duration_secs': 0.522933} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.058670] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 0c01a974-2318-461b-965f-ba4932e3bea1/0c01a974-2318-461b-965f-ba4932e3bea1.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 687.059423] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7327de0c-1a17-42dc-a7bd-5bb886a366fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.069011] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 687.069011] env[62383]: value = "task-2451346" [ 687.069011] env[62383]: _type = "Task" [ 687.069011] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.083807] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451346, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.160525] env[62383]: DEBUG nova.network.neutron [-] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.316954] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0cd2021-bdc6-42be-9c3b-740657b3465e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.340633] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451344, 'name': Destroy_Task, 'duration_secs': 0.378001} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.345659] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Destroyed the VM [ 687.345785] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 687.346467] env[62383]: DEBUG oslo_vmware.api [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Task: {'id': task-2451345, 'name': ReconfigVM_Task, 'duration_secs': 0.180656} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.346679] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-555585f7-2d80-4ff8-862b-41d271bdcc63 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.351559] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f2a39e-0d04-4a64-8fe3-45abca7e1439 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.363238] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496412', 'volume_id': '665dea38-a0be-4a97-be00-22d6a9018fe3', 'name': 'volume-665dea38-a0be-4a97-be00-22d6a9018fe3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14bb9b79-d224-4a64-861e-30dd919c5741', 'attached_at': '', 'detached_at': '', 'volume_id': '665dea38-a0be-4a97-be00-22d6a9018fe3', 'serial': '665dea38-a0be-4a97-be00-22d6a9018fe3'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 687.365757] env[62383]: INFO nova.compute.manager [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] instance snapshotting [ 687.366396] env[62383]: DEBUG nova.objects.instance [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'flavor' on Instance uuid 8a165d96-f503-4bc5-bff4-e6a85201e137 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 687.377889] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 687.377889] env[62383]: value = "task-2451347" [ 687.377889] env[62383]: _type = "Task" [ 687.377889] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.382968] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8bd05dac-7aa2-44c5-8752-6045c01d213d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.395516] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451347, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.411299] env[62383]: DEBUG nova.compute.manager [req-6cca32c2-151b-4f30-8e07-20efbba7c131 req-2a7f51b5-c58a-4435-92f4-0f7c6c119a14 service nova] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Detach interface failed, port_id=0a1882ce-460b-4676-b041-6cc7defcf5f5, reason: Instance 17498cb6-8b16-4a2e-96ae-c594966cee77 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 687.442876] env[62383]: DEBUG nova.compute.manager [req-a693c347-f636-4dac-bfb2-cc728a8bedd1 req-48678e18-39bb-4943-923e-f1a985df55b7 service nova] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Received event network-vif-plugged-241eb943-d5b6-4224-b2fb-c12596e3b206 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 687.443117] env[62383]: DEBUG oslo_concurrency.lockutils [req-a693c347-f636-4dac-bfb2-cc728a8bedd1 req-48678e18-39bb-4943-923e-f1a985df55b7 service nova] Acquiring lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.443432] env[62383]: DEBUG oslo_concurrency.lockutils [req-a693c347-f636-4dac-bfb2-cc728a8bedd1 req-48678e18-39bb-4943-923e-f1a985df55b7 service nova] Lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.443525] env[62383]: DEBUG oslo_concurrency.lockutils [req-a693c347-f636-4dac-bfb2-cc728a8bedd1 req-48678e18-39bb-4943-923e-f1a985df55b7 service nova] Lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.443651] env[62383]: DEBUG nova.compute.manager [req-a693c347-f636-4dac-bfb2-cc728a8bedd1 req-48678e18-39bb-4943-923e-f1a985df55b7 service nova] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] No waiting events found dispatching network-vif-plugged-241eb943-d5b6-4224-b2fb-c12596e3b206 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 687.443815] env[62383]: WARNING nova.compute.manager [req-a693c347-f636-4dac-bfb2-cc728a8bedd1 req-48678e18-39bb-4943-923e-f1a985df55b7 service nova] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Received unexpected event network-vif-plugged-241eb943-d5b6-4224-b2fb-c12596e3b206 for instance with vm_state building and task_state spawning. [ 687.560777] env[62383]: DEBUG nova.network.neutron [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updated VIF entry in instance network info cache for port c0a30947-ef63-4154-9495-4bb92c6a0578. 
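The "Acquiring lock ... acquired ... released" triplets around the event handling above come from oslo_concurrency's lock helpers. The same pattern can be reproduced directly with lockutils (illustrative; the lock name below copies the instance-events lock from the log):

from oslo_concurrency import lockutils

synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c-events')
def pop_instance_event():
    # Body runs with the named lock held; acquisition, wait time and hold
    # time are logged by lockutils as in the records above.
    pass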
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 687.561350] env[62383]: DEBUG nova.network.neutron [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updating instance_info_cache with network_info: [{"id": "c0a30947-ef63-4154-9495-4bb92c6a0578", "address": "fa:16:3e:ca:da:18", "network": {"id": "6eb819a0-f2d3-4b88-a271-99bbdfdb2f52", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-359095630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "89a4cd88e497492da719341b40576b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a30947-ef", "ovs_interfaceid": "c0a30947-ef63-4154-9495-4bb92c6a0578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.569031] env[62383]: DEBUG nova.network.neutron [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Successfully updated port: 241eb943-d5b6-4224-b2fb-c12596e3b206 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 687.586254] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451346, 'name': Rename_Task, 'duration_secs': 0.19598} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.587816] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 687.588373] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7308270-d014-4e52-9e16-84eea934c7e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.599576] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 687.599576] env[62383]: value = "task-2451348" [ 687.599576] env[62383]: _type = "Task" [ 687.599576] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.610805] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451348, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.664026] env[62383]: INFO nova.compute.manager [-] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Took 1.49 seconds to deallocate network for instance. [ 687.717424] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 687.717822] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.718101] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 687.718408] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.718661] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 687.718913] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 687.719272] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 687.719534] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 687.719809] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 687.720104] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 687.720409] env[62383]: DEBUG nova.virt.hardware [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 687.721996] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2615ca-5ae6-42bf-bff2-4c035bf75144 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.739155] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d3a7b5-50fe-45c1-b472-ec2b0fe6878e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.760040] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:ef:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb971244-43ba-41b4-a6a2-a4558548012c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9648e8c9-183c-4825-9b3d-25732ebd4892', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 687.767929] env[62383]: DEBUG oslo.service.loopingcall [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 687.768446] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 687.768729] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27a2a641-2dca-42ea-81cc-45309391fb51 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.791015] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 687.791015] env[62383]: value = "task-2451349" [ 687.791015] env[62383]: _type = "Task" [ 687.791015] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.808732] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451349, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.881266] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe5c2ed-1643-4648-a195-104b5132cc73 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.887607] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 583138d1-f928-4e33-a443-11c627203c44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.907712] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451347, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.911803] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1072d37-ca81-4b93-bcf4-702e7aa1e53b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.065163] env[62383]: DEBUG oslo_concurrency.lockutils [req-b68f819c-2818-44fc-8ba8-533075041945 req-303a360a-37ae-42b2-8585-ce02bdbfc3c5 service nova] Releasing lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.076929] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "refresh_cache-ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.077220] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "refresh_cache-ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.077475] env[62383]: DEBUG nova.network.neutron [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 688.114259] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451348, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.170808] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.303708] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451349, 'name': CreateVM_Task, 'duration_secs': 0.48977} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.303958] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 688.304689] env[62383]: DEBUG oslo_concurrency.lockutils [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.304876] env[62383]: DEBUG oslo_concurrency.lockutils [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.305222] env[62383]: DEBUG oslo_concurrency.lockutils [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 688.305505] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e981bc12-5a65-4a5f-8c4e-d568ecd9f7db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.312641] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 688.312641] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]529f3773-775b-69fa-4fa3-df87552b1202" [ 688.312641] env[62383]: _type = "Task" [ 688.312641] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.324203] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529f3773-775b-69fa-4fa3-df87552b1202, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.393776] env[62383]: DEBUG oslo_vmware.api [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451347, 'name': RemoveSnapshot_Task, 'duration_secs': 0.981378} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.394070] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 688.394302] env[62383]: INFO nova.compute.manager [None req-1b6dac50-400e-45f3-913c-cb84f0dc979b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Took 16.15 seconds to snapshot the instance on the hypervisor. [ 688.411785] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a10f5b03-c45b-4cc2-923f-3227665d236c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 688.419159] env[62383]: DEBUG nova.objects.instance [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Lazy-loading 'flavor' on Instance uuid 14bb9b79-d224-4a64-861e-30dd919c5741 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 688.428604] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 688.428920] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7711d435-074e-4571-a2fe-9315e1120fd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.441320] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 688.441320] env[62383]: value = "task-2451350" [ 688.441320] env[62383]: _type = "Task" [ 688.441320] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.451020] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451350, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.613613] env[62383]: DEBUG oslo_vmware.api [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451348, 'name': PowerOnVM_Task, 'duration_secs': 0.623088} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.615157] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 688.615157] env[62383]: INFO nova.compute.manager [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Took 9.80 seconds to spawn the instance on the hypervisor. [ 688.615157] env[62383]: DEBUG nova.compute.manager [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 688.616119] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f415f65-a329-48e0-b05b-30e89705a5bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.628203] env[62383]: DEBUG nova.network.neutron [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.826068] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529f3773-775b-69fa-4fa3-df87552b1202, 'name': SearchDatastore_Task, 'duration_secs': 0.013817} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.826417] env[62383]: DEBUG oslo_concurrency.lockutils [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.826656] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 688.827024] env[62383]: DEBUG oslo_concurrency.lockutils [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.827096] env[62383]: DEBUG oslo_concurrency.lockutils [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.827218] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 688.827506] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57efee96-e9f6-4fe6-b47f-8aab5fd76c2f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.837352] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 688.837554] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 688.838388] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-525b0664-c9d7-49b9-9991-3882fd62b0a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.844769] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 688.844769] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5219f598-5192-e58d-6fa4-beff4524ac26" [ 688.844769] env[62383]: _type = "Task" [ 688.844769] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.852039] env[62383]: DEBUG nova.network.neutron [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Updating instance_info_cache with network_info: [{"id": "241eb943-d5b6-4224-b2fb-c12596e3b206", "address": "fa:16:3e:af:5a:54", "network": {"id": "c1ceac9a-f4dd-41e3-9156-9fca4c3727b3", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1636786674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a657c912fef04c5ca8c0b5d96a8a3064", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241eb943-d5", "ovs_interfaceid": "241eb943-d5b6-4224-b2fb-c12596e3b206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.858403] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5219f598-5192-e58d-6fa4-beff4524ac26, 'name': SearchDatastore_Task, 'duration_secs': 0.01005} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.859234] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04d6ede3-a694-40cf-a285-4b158aeb9f08 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.865170] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 688.865170] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]521ef676-7dc0-5a2e-ea35-d694e54d10fa" [ 688.865170] env[62383]: _type = "Task" [ 688.865170] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.874415] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521ef676-7dc0-5a2e-ea35-d694e54d10fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.915981] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 9604eadf-a027-46dd-989b-0d4b752f883a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 688.928458] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2eca2ada-92b8-4020-adcf-652a0fe27b0d tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Lock "14bb9b79-d224-4a64-861e-30dd919c5741" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.912s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.950471] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451350, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.144332] env[62383]: INFO nova.compute.manager [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Took 48.12 seconds to build instance. 
[ 689.355114] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "refresh_cache-ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.357771] env[62383]: DEBUG nova.compute.manager [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Instance network_info: |[{"id": "241eb943-d5b6-4224-b2fb-c12596e3b206", "address": "fa:16:3e:af:5a:54", "network": {"id": "c1ceac9a-f4dd-41e3-9156-9fca4c3727b3", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1636786674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a657c912fef04c5ca8c0b5d96a8a3064", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241eb943-d5", "ovs_interfaceid": "241eb943-d5b6-4224-b2fb-c12596e3b206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 689.359676] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:5a:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5efce30e-48dd-493a-a354-f562a8adf7af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '241eb943-d5b6-4224-b2fb-c12596e3b206', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.367632] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Creating folder: Project (a657c912fef04c5ca8c0b5d96a8a3064). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.368603] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed2ad768-1048-4824-95fb-cc9a6fe9a5da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.385530] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521ef676-7dc0-5a2e-ea35-d694e54d10fa, 'name': SearchDatastore_Task, 'duration_secs': 0.009686} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.386072] env[62383]: DEBUG oslo_concurrency.lockutils [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.386419] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a16193af-410e-4bf6-bb06-a97791cf6060/a16193af-410e-4bf6-bb06-a97791cf6060.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 689.386792] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04283f93-d7f6-4ed7-96e4-c40d94fa8261 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.391534] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Created folder: Project (a657c912fef04c5ca8c0b5d96a8a3064) in parent group-v496304. [ 689.392113] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Creating folder: Instances. Parent ref: group-v496417. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.393039] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb1eb017-2342-4eb0-a0ec-cd62d58b4d67 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.397179] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 689.397179] env[62383]: value = "task-2451352" [ 689.397179] env[62383]: _type = "Task" [ 689.397179] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.409772] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.411432] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Created folder: Instances in parent group-v496417. 
[ 689.411839] env[62383]: DEBUG oslo.service.loopingcall [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 689.412208] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 689.412944] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6868bfcc-65c0-4739-91c0-6afd4c88a636 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.430045] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 563840a8-8fa7-4bfa-9912-933c14e7076a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 689.452142] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.452142] env[62383]: value = "task-2451354" [ 689.452142] env[62383]: _type = "Task" [ 689.452142] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.460167] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451350, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.467028] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451354, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.645467] env[62383]: DEBUG oslo_concurrency.lockutils [None req-12bf94da-c47a-4abd-a666-e05878ea69f0 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "0c01a974-2318-461b-965f-ba4932e3bea1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.438s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.729185] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "0c01a974-2318-461b-965f-ba4932e3bea1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.729185] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "0c01a974-2318-461b-965f-ba4932e3bea1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.729185] env[62383]: DEBUG nova.compute.manager [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.730371] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c203619-a15d-4052-af95-10b1c48c1da2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.739374] env[62383]: DEBUG nova.compute.manager [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 689.740220] env[62383]: DEBUG nova.objects.instance [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lazy-loading 'flavor' on Instance uuid 0c01a974-2318-461b-965f-ba4932e3bea1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 689.914718] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451352, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.935895] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 0f48434f-859f-4910-883f-2f81be647bad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 689.954848] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451350, 'name': CreateSnapshot_Task, 'duration_secs': 1.187328} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.958801] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 689.959823] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d5c82e-e283-4392-8d14-d090583d5d54 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.977316] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451354, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.019624] env[62383]: DEBUG nova.compute.manager [req-750c9cb0-09a1-4d1a-a8e7-2ce5bf02e4cc req-aa9b6eec-ae18-40bd-bb47-d1b7956277cf service nova] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Received event network-changed-241eb943-d5b6-4224-b2fb-c12596e3b206 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 690.019956] env[62383]: DEBUG nova.compute.manager [req-750c9cb0-09a1-4d1a-a8e7-2ce5bf02e4cc req-aa9b6eec-ae18-40bd-bb47-d1b7956277cf service nova] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Refreshing instance network info cache due to event network-changed-241eb943-d5b6-4224-b2fb-c12596e3b206. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 690.020245] env[62383]: DEBUG oslo_concurrency.lockutils [req-750c9cb0-09a1-4d1a-a8e7-2ce5bf02e4cc req-aa9b6eec-ae18-40bd-bb47-d1b7956277cf service nova] Acquiring lock "refresh_cache-ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.020440] env[62383]: DEBUG oslo_concurrency.lockutils [req-750c9cb0-09a1-4d1a-a8e7-2ce5bf02e4cc req-aa9b6eec-ae18-40bd-bb47-d1b7956277cf service nova] Acquired lock "refresh_cache-ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.020723] env[62383]: DEBUG nova.network.neutron [req-750c9cb0-09a1-4d1a-a8e7-2ce5bf02e4cc req-aa9b6eec-ae18-40bd-bb47-d1b7956277cf service nova] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Refreshing network info cache for port 241eb943-d5b6-4224-b2fb-c12596e3b206 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 690.149158] env[62383]: DEBUG nova.compute.manager [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 690.416371] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451352, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.888249} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.416952] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a16193af-410e-4bf6-bb06-a97791cf6060/a16193af-410e-4bf6-bb06-a97791cf6060.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 690.417238] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 690.417567] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eea8d92b-0807-44a6-b1ba-947efe7a6ed1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.429257] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 690.429257] env[62383]: value = "task-2451355" [ 690.429257] env[62383]: _type = "Task" [ 690.429257] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.442267] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.443856] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451355, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.468120] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451354, 'name': CreateVM_Task, 'duration_secs': 0.828176} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.468440] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 690.469235] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.469451] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.469841] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 690.470127] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68194d67-bf1a-4096-841d-55db666c524c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.475999] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 690.475999] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528d39de-34ab-4b88-88ac-4d665cb34543" [ 690.475999] env[62383]: _type = "Task" [ 690.475999] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.486756] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 690.489841] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-42c6953a-6c7a-4d2b-98f2-f3b0171a5e72 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.500955] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528d39de-34ab-4b88-88ac-4d665cb34543, 'name': SearchDatastore_Task, 'duration_secs': 0.013194} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.502722] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.502804] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.503022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.503171] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.504029] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 690.504029] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 690.504029] env[62383]: value = "task-2451356" [ 690.504029] env[62383]: _type = "Task" [ 690.504029] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.504029] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a6de9b2-027e-46c9-8f93-45fb10162bb1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.516389] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451356, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.520561] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 690.520757] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 690.521726] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48bb4d60-fb77-4e88-8c04-278f3fbf0e69 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.529310] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 690.529310] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5239882d-797a-0c0f-625e-d2c1a389e46b" [ 690.529310] env[62383]: _type = "Task" [ 690.529310] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.545950] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5239882d-797a-0c0f-625e-d2c1a389e46b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.575347] env[62383]: DEBUG oslo_concurrency.lockutils [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Acquiring lock "d0311c29-e1ed-446f-a52b-1687b9561740" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.575425] env[62383]: DEBUG oslo_concurrency.lockutils [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lock "d0311c29-e1ed-446f-a52b-1687b9561740" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.591288] env[62383]: DEBUG oslo_concurrency.lockutils [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Acquiring lock "14bb9b79-d224-4a64-861e-30dd919c5741" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.595223] env[62383]: DEBUG oslo_concurrency.lockutils [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Lock "14bb9b79-d224-4a64-861e-30dd919c5741" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.680479] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.747635] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 690.747931] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-924ad175-b1bf-4ca5-ad6a-3b142136d6bd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.757800] env[62383]: DEBUG oslo_vmware.api [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 690.757800] env[62383]: value = "task-2451357" [ 690.757800] env[62383]: _type = "Task" [ 690.757800] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.769814] env[62383]: DEBUG oslo_vmware.api [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451357, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.940179] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451355, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113264} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.940440] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 690.942121] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00cd11e-857d-4829-a617-a7f6c0e7e04c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.948165] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1e367665-1d4b-4686-ac79-c946423c1762 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.948165] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 22 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 690.948165] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4800MB phys_disk=200GB used_disk=22GB total_vcpus=48 used_vcpus=22 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 690.949427] env[62383]: DEBUG nova.network.neutron [req-750c9cb0-09a1-4d1a-a8e7-2ce5bf02e4cc req-aa9b6eec-ae18-40bd-bb47-d1b7956277cf service nova] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Updated VIF entry in instance network info cache for port 241eb943-d5b6-4224-b2fb-c12596e3b206. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 690.949738] env[62383]: DEBUG nova.network.neutron [req-750c9cb0-09a1-4d1a-a8e7-2ce5bf02e4cc req-aa9b6eec-ae18-40bd-bb47-d1b7956277cf service nova] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Updating instance_info_cache with network_info: [{"id": "241eb943-d5b6-4224-b2fb-c12596e3b206", "address": "fa:16:3e:af:5a:54", "network": {"id": "c1ceac9a-f4dd-41e3-9156-9fca4c3727b3", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1636786674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a657c912fef04c5ca8c0b5d96a8a3064", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241eb943-d5", "ovs_interfaceid": "241eb943-d5b6-4224-b2fb-c12596e3b206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.971310] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] a16193af-410e-4bf6-bb06-a97791cf6060/a16193af-410e-4bf6-bb06-a97791cf6060.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 690.971880] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e2cfd61-b674-4430-8c04-ab52556dc32c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.998208] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 690.998208] env[62383]: value = "task-2451358" [ 690.998208] env[62383]: _type = "Task" [ 690.998208] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.013488] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451358, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.020624] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451356, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.042266] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5239882d-797a-0c0f-625e-d2c1a389e46b, 'name': SearchDatastore_Task, 'duration_secs': 0.015883} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.046375] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bf8da80-5658-456b-9bf2-1c9c79abf6a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.053630] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 691.053630] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52fd4f4e-905e-953a-d262-91dc236c7e2d" [ 691.053630] env[62383]: _type = "Task" [ 691.053630] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.068617] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fd4f4e-905e-953a-d262-91dc236c7e2d, 'name': SearchDatastore_Task, 'duration_secs': 0.011781} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.068955] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.069287] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c/ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 691.069589] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e513078e-e367-4cab-ad04-e4936f12a5e3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.079100] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 691.079100] env[62383]: value = "task-2451359" [ 691.079100] env[62383]: _type = "Task" [ 691.079100] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.088454] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451359, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.102773] env[62383]: INFO nova.compute.manager [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Detaching volume 665dea38-a0be-4a97-be00-22d6a9018fe3 [ 691.161655] env[62383]: INFO nova.virt.block_device [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Attempting to driver detach volume 665dea38-a0be-4a97-be00-22d6a9018fe3 from mountpoint /dev/sdb [ 691.162058] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 691.162313] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496412', 'volume_id': '665dea38-a0be-4a97-be00-22d6a9018fe3', 'name': 'volume-665dea38-a0be-4a97-be00-22d6a9018fe3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14bb9b79-d224-4a64-861e-30dd919c5741', 'attached_at': '', 'detached_at': '', 'volume_id': '665dea38-a0be-4a97-be00-22d6a9018fe3', 'serial': '665dea38-a0be-4a97-be00-22d6a9018fe3'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 691.163551] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c8d3a2-a8ba-47de-a10d-31603e3cc4d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.194306] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d0de96-c024-4965-9425-991ca95bd823 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.203501] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6abfb24-d5a3-44c0-9655-b4d6b2711fab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.242024] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46003fa9-28b6-49a3-a028-5d2fc174f735 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.269347] env[62383]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] The volume has not been displaced from its original location: [datastore2] volume-665dea38-a0be-4a97-be00-22d6a9018fe3/volume-665dea38-a0be-4a97-be00-22d6a9018fe3.vmdk. No consolidation needed. {{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 691.279508] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Reconfiguring VM instance instance-0000000d to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 691.291269] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae8c5d7e-a203-4ca6-b0b8-518a95080f50 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.321538] env[62383]: DEBUG oslo_vmware.api [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Waiting for the task: (returnval){ [ 691.321538] env[62383]: value = "task-2451360" [ 691.321538] env[62383]: _type = "Task" [ 691.321538] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.326193] env[62383]: DEBUG oslo_vmware.api [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451357, 'name': PowerOffVM_Task, 'duration_secs': 0.481467} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.331871] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 691.332213] env[62383]: DEBUG nova.compute.manager [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.337013] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99cb22a1-6d7f-4903-8f06-38718975a8fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.353053] env[62383]: DEBUG oslo_vmware.api [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Task: {'id': task-2451360, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.373095] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.373095] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.452875] env[62383]: DEBUG oslo_concurrency.lockutils [req-750c9cb0-09a1-4d1a-a8e7-2ce5bf02e4cc req-aa9b6eec-ae18-40bd-bb47-d1b7956277cf service nova] Releasing lock "refresh_cache-ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.509998] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451358, 'name': ReconfigVM_Task, 'duration_secs': 0.317813} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.513780] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Reconfigured VM instance instance-00000026 to attach disk [datastore2] a16193af-410e-4bf6-bb06-a97791cf6060/a16193af-410e-4bf6-bb06-a97791cf6060.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 691.514553] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9285726-26cd-4ad6-98e8-e4f5afac9225 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.529659] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451356, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.530675] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 691.530675] env[62383]: value = "task-2451361" [ 691.530675] env[62383]: _type = "Task" [ 691.530675] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.543919] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451361, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.590538] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451359, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.727107] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976ff9b7-4af2-409d-938f-01e2ad76f380 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.736327] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37240514-27d5-419c-9737-92b44a77b733 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.740317] env[62383]: DEBUG nova.compute.manager [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.741166] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb36f3fc-408c-43ed-ae34-d3bfb66dfd63 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.776053] env[62383]: DEBUG oslo_concurrency.lockutils [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "67d41910-54e1-48f1-b0d3-f34a62595ef2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.776296] env[62383]: DEBUG oslo_concurrency.lockutils [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "67d41910-54e1-48f1-b0d3-f34a62595ef2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.776500] env[62383]: DEBUG oslo_concurrency.lockutils [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "67d41910-54e1-48f1-b0d3-f34a62595ef2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.776689] env[62383]: DEBUG oslo_concurrency.lockutils [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c 
tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "67d41910-54e1-48f1-b0d3-f34a62595ef2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.777177] env[62383]: DEBUG oslo_concurrency.lockutils [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "67d41910-54e1-48f1-b0d3-f34a62595ef2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.778909] env[62383]: INFO nova.compute.manager [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Terminating instance [ 691.780608] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366b5d5b-0a7b-45b8-bcec-d54e63d9fb91 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.790949] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e119ae-3187-4fd5-8c76-9f1d79e0d25a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.806202] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 691.835592] env[62383]: DEBUG oslo_vmware.api [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Task: {'id': task-2451360, 'name': ReconfigVM_Task, 'duration_secs': 0.358051} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.835935] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Reconfigured VM instance instance-0000000d to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 691.840611] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6ed58a5-7459-4011-bd56-5f451e6c4b4f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.857343] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78d741dd-03f8-4056-a2cd-d27fba4534e9 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "0c01a974-2318-461b-965f-ba4932e3bea1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.129s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.858393] env[62383]: DEBUG oslo_vmware.api [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Waiting for the task: (returnval){ [ 691.858393] env[62383]: value = "task-2451362" [ 691.858393] env[62383]: _type = "Task" [ 691.858393] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.867802] env[62383]: DEBUG oslo_vmware.api [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Task: {'id': task-2451362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.020752] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451356, 'name': CloneVM_Task} progress is 95%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.040755] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451361, 'name': Rename_Task, 'duration_secs': 0.165115} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.041115] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 692.041413] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58f3c9b3-7ed1-497a-a2a1-008816c5532b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.049836] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 692.049836] env[62383]: value = "task-2451363" [ 692.049836] env[62383]: _type = "Task" [ 692.049836] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.059138] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451363, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.093814] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451359, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518286} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.093814] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c/ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 692.093814] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 692.093814] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b25dad3-aa4f-498d-af95-d9bfb05cd6d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.101476] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 692.101476] env[62383]: value = "task-2451364" [ 692.101476] env[62383]: _type = "Task" [ 692.101476] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.111464] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451364, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.281019] env[62383]: INFO nova.compute.manager [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] instance snapshotting [ 692.284958] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a724df55-ccaf-4796-a354-3f7d5e9be34c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.288479] env[62383]: DEBUG nova.compute.manager [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 692.288949] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 692.289873] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1738b573-3449-4d77-9a6f-bbece263d8fb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.316291] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfc32fd-e0c2-475b-a0ab-51383c37abff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.321030] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 692.321741] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91fd5114-79c5-4031-ac45-f5e8b155cd08 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.329772] env[62383]: DEBUG oslo_vmware.api [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 692.329772] env[62383]: value = "task-2451365" [ 692.329772] env[62383]: _type = "Task" [ 692.329772] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.335301] env[62383]: ERROR nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [req-d4d94eb5-a4ce-4bfa-a9b4-0a272ed9aebb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d4d94eb5-a4ce-4bfa-a9b4-0a272ed9aebb"}]} [ 692.345043] env[62383]: DEBUG oslo_vmware.api [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451365, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.356615] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 692.368941] env[62383]: DEBUG oslo_vmware.api [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Task: {'id': task-2451362, 'name': ReconfigVM_Task, 'duration_secs': 0.159597} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.369261] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496412', 'volume_id': '665dea38-a0be-4a97-be00-22d6a9018fe3', 'name': 'volume-665dea38-a0be-4a97-be00-22d6a9018fe3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '14bb9b79-d224-4a64-861e-30dd919c5741', 'attached_at': '', 'detached_at': '', 'volume_id': '665dea38-a0be-4a97-be00-22d6a9018fe3', 'serial': '665dea38-a0be-4a97-be00-22d6a9018fe3'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 692.374485] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 692.374664] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 692.389024] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 692.410179] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 692.520922] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451356, 'name': CloneVM_Task, 'duration_secs': 1.757817} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.521214] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Created linked-clone VM from snapshot [ 692.522883] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb67ec25-45fd-4440-87ad-26114ea3bc48 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.531574] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Uploading image 6f1f859b-fe53-4112-82ab-d69109fbccbf {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 692.557791] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 692.557791] env[62383]: value = "vm-496421" [ 692.557791] env[62383]: _type = "VirtualMachine" [ 692.557791] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 692.558076] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-267acf95-1371-4299-9ec8-a2601a0e28d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.565247] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451363, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.566725] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lease: (returnval){ [ 692.566725] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525ccd30-ddb5-25b4-9a26-a5b42182a150" [ 692.566725] env[62383]: _type = "HttpNfcLease" [ 692.566725] env[62383]: } obtained for exporting VM: (result){ [ 692.566725] env[62383]: value = "vm-496421" [ 692.566725] env[62383]: _type = "VirtualMachine" [ 692.566725] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 692.566989] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the lease: (returnval){ [ 692.566989] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525ccd30-ddb5-25b4-9a26-a5b42182a150" [ 692.566989] env[62383]: _type = "HttpNfcLease" [ 692.566989] env[62383]: } to be ready. 
{{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 692.576054] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 692.576054] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525ccd30-ddb5-25b4-9a26-a5b42182a150" [ 692.576054] env[62383]: _type = "HttpNfcLease" [ 692.576054] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 692.616409] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451364, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077841} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.619204] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 692.620694] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f847c01-5ff4-4b82-9ae7-94282252e3ac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.655831] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c/ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 692.659743] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64e88c8c-bd6b-4467-a7ea-56b9883cf7b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.689586] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.689873] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.697487] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 692.697487] env[62383]: 
value = "task-2451367" [ 692.697487] env[62383]: _type = "Task" [ 692.697487] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.713983] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451367, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.832326] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 692.834080] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1745fef9-4290-43b8-99ec-9aaa82903b1a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.844823] env[62383]: DEBUG oslo_vmware.api [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451365, 'name': PowerOffVM_Task, 'duration_secs': 0.357764} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.846191] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 692.846518] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 692.846895] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 692.846895] env[62383]: value = "task-2451368" [ 692.846895] env[62383]: _type = "Task" [ 692.846895] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.849840] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17fa4aa7-415f-46e0-9c38-4ac9cfe33298 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.860584] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451368, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.934900] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 692.934991] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 692.936402] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Deleting the datastore file [datastore2] 67d41910-54e1-48f1-b0d3-f34a62595ef2 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 692.938106] env[62383]: DEBUG nova.objects.instance [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Lazy-loading 'flavor' on Instance uuid 14bb9b79-d224-4a64-861e-30dd919c5741 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 692.939686] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-517dfe16-2c0c-4a15-87b5-7a3a74fd1f6a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.951645] env[62383]: DEBUG oslo_vmware.api [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 692.951645] env[62383]: value = "task-2451370" [ 692.951645] env[62383]: _type = "Task" [ 692.951645] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.969934] env[62383]: DEBUG oslo_vmware.api [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451370, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.062134] env[62383]: DEBUG oslo_vmware.api [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451363, 'name': PowerOnVM_Task, 'duration_secs': 0.528955} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.063242] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 693.063477] env[62383]: DEBUG nova.compute.manager [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 693.064529] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38f71f1-9b01-46fd-a393-6f1a6866bee7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.067176] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820a1772-6f94-406b-999d-abcf51fbf6d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.077457] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49259203-9f82-40da-9bdc-c07da978bf5b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.084725] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 693.084725] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525ccd30-ddb5-25b4-9a26-a5b42182a150" [ 693.084725] env[62383]: _type = "HttpNfcLease" [ 693.084725] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 693.085540] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 693.085540] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525ccd30-ddb5-25b4-9a26-a5b42182a150" [ 693.085540] env[62383]: _type = "HttpNfcLease" [ 693.085540] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 693.086295] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f297b7-84fb-4d34-baa7-8ddf7e9cc8de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.113663] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9426dd84-ba4a-446b-b135-a622fcbaa9f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.119750] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52964f4b-90a9-ab03-99bc-feeb49a26bb3/disk-0.vmdk from lease info. 
{{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 693.119964] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52964f4b-90a9-ab03-99bc-feeb49a26bb3/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 693.182396] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cf74e9-a5c8-4008-9281-c8474b6fdede {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.200480] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 693.210787] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.217812] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6e8fcc95-0c59-4128-b88b-bdc2090efda9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.362595] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451368, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.461559] env[62383]: DEBUG oslo_vmware.api [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451370, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159646} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.462181] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 693.462283] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 693.462452] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.462967] env[62383]: INFO nova.compute.manager [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Took 1.17 seconds to destroy the instance on the hypervisor. [ 693.462967] env[62383]: DEBUG oslo.service.loopingcall [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 693.463700] env[62383]: DEBUG nova.compute.manager [-] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 693.463812] env[62383]: DEBUG nova.network.neutron [-] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.598700] env[62383]: DEBUG oslo_concurrency.lockutils [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.717150] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451367, 'name': ReconfigVM_Task, 'duration_secs': 0.693} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.718089] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Reconfigured VM instance instance-00000028 to attach disk [datastore2] ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c/ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 693.718784] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03366d2f-1f7a-4c09-8051-40e9c77af52f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.730126] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 693.730126] env[62383]: value = "task-2451371" [ 693.730126] env[62383]: _type = "Task" [ 693.730126] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.744259] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451371, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.753241] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 65 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 693.756129] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 65 to 66 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 693.756414] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 693.866992] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 
tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451368, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.949026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-27a19591-fdcf-42f7-b2ad-00a1e8ebb194 tempest-VolumesAssistedSnapshotsTest-711608178 tempest-VolumesAssistedSnapshotsTest-711608178-project-admin] Lock "14bb9b79-d224-4a64-861e-30dd919c5741" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.357s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.986162] env[62383]: DEBUG nova.compute.manager [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 693.986162] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94739c0-37a4-4e6e-92b7-b8397a371505 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.199451] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "a16193af-410e-4bf6-bb06-a97791cf6060" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.199451] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a16193af-410e-4bf6-bb06-a97791cf6060" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.199451] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "a16193af-410e-4bf6-bb06-a97791cf6060-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.199451] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a16193af-410e-4bf6-bb06-a97791cf6060-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.199699] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a16193af-410e-4bf6-bb06-a97791cf6060-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.200922] env[62383]: INFO nova.compute.manager [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Terminating instance [ 694.244719] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451371, 'name': Rename_Task, 'duration_secs': 0.230331} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.245013] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 694.245337] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7af95632-cead-4ba9-9a14-5efbeb11a1cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.253814] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 694.253814] env[62383]: value = "task-2451372" [ 694.253814] env[62383]: _type = "Task" [ 694.253814] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.265399] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 694.265724] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.975s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.266067] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451372, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.266475] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.454s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.268061] env[62383]: INFO nova.compute.claims [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 694.363838] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451368, 'name': CreateSnapshot_Task, 'duration_secs': 1.035105} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.365456] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 694.367062] env[62383]: DEBUG nova.compute.manager [req-b71b8725-5a83-498f-ab2d-f670d0ed87f0 req-89fb93dc-6ce3-467f-9008-7e77bd1cd4ee service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Received event network-vif-deleted-cc45bc62-e82d-40dc-b803-56b790aca5d4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 694.367334] env[62383]: INFO nova.compute.manager [req-b71b8725-5a83-498f-ab2d-f670d0ed87f0 req-89fb93dc-6ce3-467f-9008-7e77bd1cd4ee service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Neutron deleted interface cc45bc62-e82d-40dc-b803-56b790aca5d4; detaching it from the instance and deleting it from the info cache [ 694.367829] env[62383]: DEBUG nova.network.neutron [req-b71b8725-5a83-498f-ab2d-f670d0ed87f0 req-89fb93dc-6ce3-467f-9008-7e77bd1cd4ee service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.369858] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6bfd1d-a226-4ce9-a223-1dcd0f78bf01 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.500043] env[62383]: INFO nova.compute.manager [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] instance snapshotting [ 694.500043] env[62383]: WARNING nova.compute.manager [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 694.501082] env[62383]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b97825-3be6-434e-8727-a34f51404ee3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.529067] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852ac183-d086-49fc-b2f0-ed13db133157 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.708017] env[62383]: DEBUG nova.compute.manager [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 694.708017] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 694.708017] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d0b8cd-f9c5-4005-b2f1-fec04046a0bc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.713892] env[62383]: DEBUG nova.network.neutron [-] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.720759] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 694.720759] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7c94c31-31b5-4f0d-8ba1-bc4a479cc012 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.730503] env[62383]: DEBUG oslo_vmware.api [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 694.730503] env[62383]: value = "task-2451374" [ 694.730503] env[62383]: _type = "Task" [ 694.730503] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.745872] env[62383]: DEBUG oslo_vmware.api [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451374, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.765979] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451372, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.808148] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "93234e99-268f-491e-96bd-a77f4c9f164b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.810355] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "93234e99-268f-491e-96bd-a77f4c9f164b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 694.876488] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad71ffdb-170a-49bc-bd5a-e45345f87c59 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.892129] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 694.892575] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e6d494a0-9e92-4518-ac3b-266c74c747c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.899554] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479abe05-fbd7-4b76-bc0c-d992d08f2c4a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.919446] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 694.919446] env[62383]: value = "task-2451375" [ 694.919446] env[62383]: _type = "Task" [ 694.919446] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.931714] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451375, 'name': CloneVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.951988] env[62383]: DEBUG nova.compute.manager [req-b71b8725-5a83-498f-ab2d-f670d0ed87f0 req-89fb93dc-6ce3-467f-9008-7e77bd1cd4ee service nova] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Detach interface failed, port_id=cc45bc62-e82d-40dc-b803-56b790aca5d4, reason: Instance 67d41910-54e1-48f1-b0d3-f34a62595ef2 could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 695.045095] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 695.045415] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5002b7e9-5b94-4bd7-ac80-b9d96d7f2838 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.054871] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 695.054871] env[62383]: value = "task-2451376" [ 695.054871] env[62383]: _type = "Task" [ 695.054871] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.064042] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451376, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.222216] env[62383]: INFO nova.compute.manager [-] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Took 1.76 seconds to deallocate network for instance. [ 695.242314] env[62383]: DEBUG oslo_vmware.api [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451374, 'name': PowerOffVM_Task, 'duration_secs': 0.198023} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.242314] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 695.242314] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 695.242314] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7fe5657-38ed-4fca-8b32-7ca6c2747d97 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.269855] env[62383]: DEBUG oslo_vmware.api [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451372, 'name': PowerOnVM_Task, 'duration_secs': 0.714105} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.269855] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 695.269855] env[62383]: INFO nova.compute.manager [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Took 8.94 seconds to spawn the instance on the hypervisor. [ 695.269855] env[62383]: DEBUG nova.compute.manager [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 695.271316] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b954093d-502f-4f75-b67e-4a8abbce269b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.321480] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 695.321736] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 695.321990] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleting the datastore file [datastore2] a16193af-410e-4bf6-bb06-a97791cf6060 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.322449] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cb00370-702e-40b1-bdba-92b63b591814 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.331112] env[62383]: DEBUG oslo_vmware.api [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 695.331112] env[62383]: value = "task-2451378" [ 695.331112] env[62383]: _type = "Task" [ 695.331112] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.346091] env[62383]: DEBUG oslo_vmware.api [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451378, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.434118] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451375, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.565089] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451376, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.729196] env[62383]: DEBUG oslo_concurrency.lockutils [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.801328] env[62383]: INFO nova.compute.manager [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Took 45.60 seconds to build instance. [ 695.843480] env[62383]: DEBUG oslo_vmware.api [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176437} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.846196] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 695.846196] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 695.846196] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 695.846478] env[62383]: INFO nova.compute.manager [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Took 1.14 seconds to destroy the instance on the hypervisor. 
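Editor's note: the records above trace the driver's destroy path (PowerOffVM_Task, UnregisterVM, then a DeleteDatastoreFile_Task that is polled at api.py:434 until it reports "completed successfully" with a duration_secs value). The following is a minimal sketch of that polling pattern only, not the oslo.vmware implementation; get_task_info is a hypothetical stand-in for the real PropertyCollector read, and the state names follow the vSphere TaskInfo enum ('queued', 'running', 'success', 'error').

import time

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    # Poll a vCenter task reference until it reaches a terminal state (sketch only).
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)  # hypothetical helper, e.g. {'state': 'running', 'progress': 14}
        if info["state"] == "success":
            return time.monotonic() - start  # roughly the 'duration_secs' reported in the log
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # 'queued' / 'running': wait and poll again, as the _poll_task entries above do
        time.sleep(poll_interval)

# Example with a stub that succeeds on the second poll:
states = iter([{"state": "running", "progress": 14}, {"state": "success"}])
print(wait_for_task(lambda ref: next(states), task_ref="task-2451378", poll_interval=0))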
[ 695.846702] env[62383]: DEBUG oslo.service.loopingcall [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 695.847108] env[62383]: DEBUG nova.compute.manager [-] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 695.847224] env[62383]: DEBUG nova.network.neutron [-] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 695.875183] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8799e0f0-61ba-4890-a50c-8bf2973f9a95 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.884067] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6acd6e-eef9-478f-b534-c66904322b63 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.919689] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9225942-7065-485d-b61a-00b2cd49c2a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.943937] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd26c5f1-2dc1-42d7-9f4d-0996da651220 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.948165] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451375, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.960473] env[62383]: DEBUG nova.compute.provider_tree [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.971562] env[62383]: DEBUG oslo_concurrency.lockutils [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquiring lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.971562] env[62383]: DEBUG oslo_concurrency.lockutils [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.971776] env[62383]: DEBUG oslo_concurrency.lockutils [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquiring lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.971959] env[62383]: DEBUG oslo_concurrency.lockutils [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.972145] env[62383]: DEBUG oslo_concurrency.lockutils [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.974782] env[62383]: INFO nova.compute.manager [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Terminating instance [ 696.066545] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451376, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.307769] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc7a3ac1-f1ed-4559-bdf9-9ae106d6cb49 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.547s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.426666] env[62383]: DEBUG nova.compute.manager [req-ddf2f90d-40a6-4a56-8f37-8da7af8ad32b req-1b490b9d-b22b-460c-9bc4-561dd2bedd70 service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Received event network-vif-deleted-9648e8c9-183c-4825-9b3d-25732ebd4892 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 696.426826] env[62383]: INFO nova.compute.manager [req-ddf2f90d-40a6-4a56-8f37-8da7af8ad32b req-1b490b9d-b22b-460c-9bc4-561dd2bedd70 service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Neutron deleted interface 9648e8c9-183c-4825-9b3d-25732ebd4892; detaching it from the instance and deleting it from the info cache [ 696.427019] env[62383]: DEBUG nova.network.neutron [req-ddf2f90d-40a6-4a56-8f37-8da7af8ad32b req-1b490b9d-b22b-460c-9bc4-561dd2bedd70 service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.429057] env[62383]: INFO nova.compute.manager [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Rescuing [ 696.429294] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "refresh_cache-ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 696.429526] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "refresh_cache-ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.429679] env[62383]: DEBUG nova.network.neutron [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 696.442440] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451375, 'name': CloneVM_Task, 'duration_secs': 1.317337} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.442704] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Created linked-clone VM from snapshot [ 696.443465] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dbc9f5-57f0-473c-b168-250f35ed0353 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.452500] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Uploading image 5465739d-3e38-4577-aeb1-56c054db252e {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 696.463811] env[62383]: DEBUG nova.scheduler.client.report [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 696.480035] env[62383]: DEBUG nova.compute.manager [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 696.480035] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 696.480735] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9671d0-e8ba-42c6-a8f5-2933e6da5eb2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.486593] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 696.486593] env[62383]: value = "vm-496423" [ 696.486593] env[62383]: _type = "VirtualMachine" [ 696.486593] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 696.487091] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-105d514e-5a01-49b2-918e-119f3b7db38f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.495148] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 696.496568] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6895df82-35ff-4c46-91a7-6c8427cced1a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.498266] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lease: (returnval){ [ 696.498266] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5244e9d6-0a97-930c-a9f5-92c95dec99fe" [ 696.498266] env[62383]: _type = "HttpNfcLease" [ 696.498266] env[62383]: } obtained for exporting VM: (result){ [ 696.498266] env[62383]: value = "vm-496423" [ 696.498266] env[62383]: _type = "VirtualMachine" [ 696.498266] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 696.498582] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the lease: (returnval){ [ 696.498582] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5244e9d6-0a97-930c-a9f5-92c95dec99fe" [ 696.498582] env[62383]: _type = "HttpNfcLease" [ 696.498582] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 696.506020] env[62383]: DEBUG oslo_vmware.api [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for the task: (returnval){ [ 696.506020] env[62383]: value = "task-2451380" [ 696.506020] env[62383]: _type = "Task" [ 696.506020] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.511065] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 696.511065] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5244e9d6-0a97-930c-a9f5-92c95dec99fe" [ 696.511065] env[62383]: _type = "HttpNfcLease" [ 696.511065] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 696.516336] env[62383]: DEBUG oslo_vmware.api [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451380, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.568357] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451376, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.737090] env[62383]: DEBUG nova.network.neutron [-] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.811229] env[62383]: DEBUG nova.compute.manager [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 696.929813] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b099568-fa02-4fab-b07b-4275adde3037 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.942631] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb02afa-2b09-4f66-b010-cdb3023075f6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.970569] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.971217] env[62383]: DEBUG nova.compute.manager [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 696.992030] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.883s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.992241] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.994428] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.022s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.996103] env[62383]: INFO nova.compute.claims [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.999722] env[62383]: DEBUG nova.compute.manager [req-ddf2f90d-40a6-4a56-8f37-8da7af8ad32b req-1b490b9d-b22b-460c-9bc4-561dd2bedd70 service nova] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Detach interface failed, port_id=9648e8c9-183c-4825-9b3d-25732ebd4892, reason: Instance a16193af-410e-4bf6-bb06-a97791cf6060 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 697.012804] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 697.012804] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5244e9d6-0a97-930c-a9f5-92c95dec99fe" [ 697.012804] env[62383]: _type = "HttpNfcLease" [ 697.012804] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 697.013598] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 697.013598] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5244e9d6-0a97-930c-a9f5-92c95dec99fe" [ 697.013598] env[62383]: _type = "HttpNfcLease" [ 697.013598] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 697.014495] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37910ca4-b419-4819-b3b1-9f9edee4df03 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.020764] env[62383]: DEBUG oslo_vmware.api [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451380, 'name': PowerOffVM_Task, 'duration_secs': 0.226559} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.023256] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 697.023256] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 697.023256] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea2fc2a8-268d-4a44-96b7-214cefd12771 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.027859] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0b49-20cb-aa0c-3b61-9e9770b4567f/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 697.028142] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0b49-20cb-aa0c-3b61-9e9770b4567f/disk-0.vmdk for reading. 
{{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 697.098719] env[62383]: INFO nova.scheduler.client.report [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Deleted allocations for instance 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be [ 697.112092] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 697.112092] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 697.112092] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Deleting the datastore file [datastore1] e51a0dd7-b5da-44cb-9cd8-62932aec3ad5 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 697.114468] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c7fba40-37ff-42f2-8447-a0858a3dacea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.117042] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451376, 'name': CreateSnapshot_Task, 'duration_secs': 1.563061} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.119234] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 697.120394] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034a1345-5f5f-448c-b55b-907fc73bc92c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.124702] env[62383]: DEBUG oslo_vmware.api [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for the task: (returnval){ [ 697.124702] env[62383]: value = "task-2451382" [ 697.124702] env[62383]: _type = "Task" [ 697.124702] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.141917] env[62383]: DEBUG oslo_vmware.api [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451382, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.166512] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ff24e518-aa0f-4af8-9246-82aa871c3a67 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.238834] env[62383]: INFO nova.compute.manager [-] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Took 1.39 seconds to deallocate network for instance. [ 697.326734] env[62383]: DEBUG nova.network.neutron [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Updating instance_info_cache with network_info: [{"id": "241eb943-d5b6-4224-b2fb-c12596e3b206", "address": "fa:16:3e:af:5a:54", "network": {"id": "c1ceac9a-f4dd-41e3-9156-9fca4c3727b3", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1636786674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a657c912fef04c5ca8c0b5d96a8a3064", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241eb943-d5", "ovs_interfaceid": "241eb943-d5b6-4224-b2fb-c12596e3b206", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.341607] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.503030] env[62383]: DEBUG nova.compute.utils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 697.504786] env[62383]: DEBUG nova.compute.manager [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 697.505079] env[62383]: DEBUG nova.network.neutron [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 697.581993] env[62383]: DEBUG nova.policy [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b240965406114137914b03ac96806ea2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c04aced555934225bc58a044bfb4bc35', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 697.609032] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1a7f397-6abc-4ef1-b682-341350b0d50c tempest-ImagesOneServerTestJSON-231614157 tempest-ImagesOneServerTestJSON-231614157-project-member] Lock "0dd47ff3-5a5b-4c51-8e6a-fc11449f21be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.082s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.644683] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 697.645573] env[62383]: DEBUG oslo_vmware.api [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Task: {'id': task-2451382, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231283} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.646430] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-75b47c20-d61f-4ca4-bdfa-e8cb5a286c26 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.651246] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 697.651788] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 697.652027] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 697.652230] env[62383]: INFO nova.compute.manager [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 697.652565] env[62383]: DEBUG oslo.service.loopingcall [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 697.652729] env[62383]: DEBUG nova.compute.manager [-] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 697.652846] env[62383]: DEBUG nova.network.neutron [-] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.662343] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 697.662343] env[62383]: value = "task-2451383" [ 697.662343] env[62383]: _type = "Task" [ 697.662343] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.673325] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451383, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.749216] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.829990] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "refresh_cache-ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.012265] env[62383]: DEBUG nova.compute.manager [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 698.153079] env[62383]: DEBUG nova.network.neutron [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Successfully created port: c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 698.178074] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451383, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.286288] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquiring lock "14bb9b79-d224-4a64-861e-30dd919c5741" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.289563] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lock "14bb9b79-d224-4a64-861e-30dd919c5741" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.290025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquiring lock "14bb9b79-d224-4a64-861e-30dd919c5741-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.290339] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lock "14bb9b79-d224-4a64-861e-30dd919c5741-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.290534] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lock "14bb9b79-d224-4a64-861e-30dd919c5741-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.294013] env[62383]: INFO nova.compute.manager [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Terminating instance [ 698.680486] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451383, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.709884] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf3bce0-2ede-41f1-92ed-e0f8e9c3251c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.722420] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9e4301-d840-4590-ad5e-dbfd084b2501 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.760563] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d8fda4-c083-452d-a319-254a9c2eed8d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.769664] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15217fbc-aa78-49ba-abfd-0acd519000f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.785023] env[62383]: DEBUG nova.compute.provider_tree [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.803605] env[62383]: DEBUG nova.compute.manager [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 698.803605] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 698.803808] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68115542-e5fc-4fa2-90ae-aa78c9b9a944 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.813297] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 698.813686] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28390c36-a5df-4273-b31c-b9d04c0e3f2b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.822964] env[62383]: DEBUG oslo_vmware.api [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for the task: (returnval){ [ 698.822964] env[62383]: value = "task-2451384" [ 698.822964] env[62383]: _type = "Task" [ 698.822964] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.833430] env[62383]: DEBUG oslo_vmware.api [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.030937] env[62383]: DEBUG nova.compute.manager [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 699.155313] env[62383]: DEBUG nova.network.neutron [-] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.175481] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451383, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.289055] env[62383]: DEBUG nova.scheduler.client.report [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 699.334153] env[62383]: DEBUG oslo_vmware.api [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451384, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.657967] env[62383]: INFO nova.compute.manager [-] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Took 2.00 seconds to deallocate network for instance. [ 699.677810] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451383, 'name': CloneVM_Task, 'duration_secs': 1.966104} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.678219] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Created linked-clone VM from snapshot [ 699.678981] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a8906a-c667-486f-ba19-e8375dd3b365 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.687900] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Uploading image 35aacbb9-9f29-4179-83e7-370081c601df {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 699.794620] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.800s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.795385] env[62383]: DEBUG nova.compute.manager [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 699.797963] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.781s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.800022] env[62383]: INFO nova.compute.claims [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.834501] env[62383]: DEBUG oslo_vmware.api [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451384, 'name': PowerOffVM_Task, 'duration_secs': 0.629677} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.835109] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 699.835435] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 699.835823] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79db1b0b-f788-4692-aee0-5aa252e43aaa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.864580] env[62383]: DEBUG nova.network.neutron [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Successfully updated port: c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 699.905654] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 699.906039] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 699.906311] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 
tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Deleting the datastore file [datastore2] 14bb9b79-d224-4a64-861e-30dd919c5741 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 699.906624] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21c2cfcd-0c08-4316-8946-19d496549105 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.915616] env[62383]: DEBUG oslo_vmware.api [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for the task: (returnval){ [ 699.915616] env[62383]: value = "task-2451386" [ 699.915616] env[62383]: _type = "Task" [ 699.915616] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.926382] env[62383]: DEBUG oslo_vmware.api [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.165068] env[62383]: DEBUG oslo_concurrency.lockutils [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.305045] env[62383]: DEBUG nova.compute.utils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 700.309627] env[62383]: DEBUG nova.compute.manager [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 700.309938] env[62383]: DEBUG nova.network.neutron [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 700.367177] env[62383]: DEBUG nova.policy [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81a500b5135846b4aad2898b50c4d807', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b99120a31cb46348ef76f6aea1e26cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 700.369326] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 700.369605] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquired lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.369791] env[62383]: DEBUG nova.network.neutron [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 700.427773] env[62383]: DEBUG oslo_vmware.api [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Task: {'id': task-2451386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182248} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.428145] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 700.428376] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 700.428613] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 700.428834] env[62383]: INFO nova.compute.manager [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Took 1.63 seconds to destroy the instance on the hypervisor. [ 700.429157] env[62383]: DEBUG oslo.service.loopingcall [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 700.429391] env[62383]: DEBUG nova.compute.manager [-] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 700.429501] env[62383]: DEBUG nova.network.neutron [-] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.671766] env[62383]: DEBUG nova.network.neutron [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Successfully created port: daaf9854-b852-4045-8380-ae136341958b {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 700.810139] env[62383]: DEBUG nova.compute.manager [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 700.926857] env[62383]: DEBUG nova.network.neutron [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 701.215757] env[62383]: DEBUG nova.network.neutron [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updating instance_info_cache with network_info: [{"id": "c23968b2-dbec-433d-8bcc-80644a89ec08", "address": "fa:16:3e:3f:6c:37", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23968b2-db", "ovs_interfaceid": "c23968b2-dbec-433d-8bcc-80644a89ec08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.428310] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51bda02-9856-4af5-8d24-d4e0758071d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.436724] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3377c661-2361-46b4-88c6-9ebaec1d2c4f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.470750] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67e91aa-dfd9-4048-ab2e-1442f31aeeec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.480176] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0799d1d-86d3-46ff-83c1-6e32c72a8924 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.495399] env[62383]: DEBUG nova.compute.provider_tree [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.722315] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Releasing lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
701.722654] env[62383]: DEBUG nova.compute.manager [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Instance network_info: |[{"id": "c23968b2-dbec-433d-8bcc-80644a89ec08", "address": "fa:16:3e:3f:6c:37", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23968b2-db", "ovs_interfaceid": "c23968b2-dbec-433d-8bcc-80644a89ec08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 701.754249] env[62383]: DEBUG nova.network.neutron [-] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.828353] env[62383]: DEBUG nova.compute.manager [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 701.999027] env[62383]: DEBUG nova.scheduler.client.report [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 702.257011] env[62383]: INFO nova.compute.manager [-] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Took 1.83 seconds to deallocate network for instance. 
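The repeated "Acquiring lock ... / acquired ... waited / released ... held" entries around the resource tracker and the "refresh_cache-<uuid>" cache refreshes come from oslo.concurrency's named-lock wrappers. The snippet below is a minimal, hypothetical sketch of that acquire/wait/hold timing pattern using only the standard library; timed_lock, its registry, and the log wording are illustrative stand-ins, not the oslo_concurrency.lockutils implementation.

    # Illustrative sketch only: re-creates the "Acquiring lock" / "acquired ...
    # waited" / "released ... held" pattern seen in the log; not oslo code.
    import logging
    import threading
    import time
    from contextlib import contextmanager

    LOG = logging.getLogger(__name__)
    _locks = {}                    # one named lock per resource, e.g. "compute_resources"
    _locks_guard = threading.Lock()

    @contextmanager
    def timed_lock(name, caller):
        with _locks_guard:
            lock = _locks.setdefault(name, threading.Lock())
        LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, caller, acquired - start)
        try:
            yield
        finally:
            lock.release()
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, caller, time.monotonic() - acquired)

    # Usage mirroring the resource-tracker entries above:
    # with timed_lock("compute_resources",
    #                 "nova.compute.resource_tracker.ResourceTracker.instance_claim"):
    #     ...  # claim or update usage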
[ 702.440233] env[62383]: DEBUG nova.network.neutron [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Successfully updated port: daaf9854-b852-4045-8380-ae136341958b {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 702.503118] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 702.503368] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.503687] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 702.503687] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.503835] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 702.503983] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 702.504415] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 702.504620] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 702.504833] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 702.505038] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 702.505373] env[62383]: DEBUG nova.virt.hardware [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 702.507565] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.508148] env[62383]: DEBUG nova.compute.manager [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 702.511893] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f910682-71de-4fa3-9505-cb76337819d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.522020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.554s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.522020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.522020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.300s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.524182] env[62383]: INFO nova.compute.claims [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.533524] env[62383]: DEBUG oslo_vmware.rw_handles [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 702.533524] env[62383]: value = "vm-496425" [ 702.533524] env[62383]: _type = "VirtualMachine" [ 702.533524] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 702.533524] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c5a7eb4f-93c6-49cd-879e-6af1664cc5ea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.539444] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7caa581d-bf2f-4e16-ae11-a57729b2bd35 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.546191] env[62383]: DEBUG oslo_vmware.rw_handles [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lease: (returnval){ [ 702.546191] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5288e68b-e312-2c4b-8a7a-cc3f0035e545" [ 702.546191] env[62383]: _type = "HttpNfcLease" [ 702.546191] env[62383]: } obtained for exporting VM: (result){ [ 702.546191] env[62383]: value = "vm-496425" [ 702.546191] env[62383]: _type = "VirtualMachine" [ 702.546191] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 702.546191] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the lease: (returnval){ [ 702.546191] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5288e68b-e312-2c4b-8a7a-cc3f0035e545" [ 702.546191] env[62383]: _type = "HttpNfcLease" [ 702.546191] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 702.552024] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 702.552024] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.552024] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 702.552297] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.552297] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 702.552297] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 702.552297] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 702.552297] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 702.552442] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 702.552442] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 702.552442] env[62383]: DEBUG nova.virt.hardware [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 702.553307] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d3f069-58c4-425f-940a-5892f9b09595 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.568831] env[62383]: INFO nova.scheduler.client.report [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Deleted allocations for instance 330b5e35-3292-4df7-b288-547b158e671a [ 702.573343] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 
tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:6c:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c23968b2-dbec-433d-8bcc-80644a89ec08', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 702.581386] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Creating folder: Project (c04aced555934225bc58a044bfb4bc35). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 702.587203] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52964f4b-90a9-ab03-99bc-feeb49a26bb3/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 702.587857] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a189b4e0-4ee3-4305-b7f3-466d655165e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.590422] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d44118e-7fc5-47e2-822c-6fd68248d8ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.598312] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b36b149-b88e-4ff5-915f-32e6ffd6eab3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.605498] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 702.605498] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5288e68b-e312-2c4b-8a7a-cc3f0035e545" [ 702.605498] env[62383]: _type = "HttpNfcLease" [ 702.605498] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 702.605857] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52964f4b-90a9-ab03-99bc-feeb49a26bb3/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 702.606049] env[62383]: ERROR oslo_vmware.rw_handles [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52964f4b-90a9-ab03-99bc-feeb49a26bb3/disk-0.vmdk due to incomplete transfer. 
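The CloneVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries above show the same shape each time: the driver submits a vCenter task, then polls it until it reports success ("completed successfully") or failure, logging intermediate progress ("progress is 94%"). A rough sketch of such a polling loop follows; the real logic lives in oslo_vmware.api (wait_for_task / _poll_task), and get_task_info together with the dict keys it returns are assumptions made purely for illustration.

    # Generic poll-until-done loop, sketched after the task entries in this log.
    import logging
    import time

    LOG = logging.getLogger(__name__)

    def wait_for_task(task_id, get_task_info, interval=0.5, timeout=300):
        """Poll a task until it succeeds, fails, or times out.

        get_task_info is a caller-supplied placeholder returning a dict with
        'state', 'progress' and optionally 'error' keys (an assumption for
        illustration, not the vSphere API shape).
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)
            if info['state'] == 'success':
                LOG.debug("Task: %s completed successfully.", task_id)
                return info
            if info['state'] == 'error':
                raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
            LOG.debug("Task: %s progress is %s%%.", task_id, info.get('progress', 0))
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")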
[ 702.606750] env[62383]: DEBUG oslo_vmware.rw_handles [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 702.606750] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5288e68b-e312-2c4b-8a7a-cc3f0035e545" [ 702.606750] env[62383]: _type = "HttpNfcLease" [ 702.606750] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 702.607819] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d613f56a-488d-4515-81a9-5324ab4a986c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.609200] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd368a7d-39c2-4061-b230-de5aea19d47d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.624026] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Created folder: Project (c04aced555934225bc58a044bfb4bc35) in parent group-v496304. [ 702.624026] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Creating folder: Instances. Parent ref: group-v496426. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 702.624026] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d21c0a93-58fc-46a5-b6f1-b193240cd8f0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.629294] env[62383]: DEBUG oslo_vmware.rw_handles [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5228533e-c36d-c0c5-2bf4-7dc125f7de75/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 702.629542] env[62383]: DEBUG oslo_vmware.rw_handles [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5228533e-c36d-c0c5-2bf4-7dc125f7de75/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 702.633249] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52964f4b-90a9-ab03-99bc-feeb49a26bb3/disk-0.vmdk. 
{{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 702.634154] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Uploaded image 6f1f859b-fe53-4112-82ab-d69109fbccbf to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 702.635863] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 702.637201] env[62383]: DEBUG nova.compute.manager [req-c245c864-b431-4547-b024-6704219fb05f req-eeaa4f5a-11af-4761-a877-7ba51750f2e0 service nova] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Received event network-vif-deleted-b44ff87a-66f9-4720-9a57-b485496554c7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 702.638678] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4ea16bd7-4137-4c38-b209-ff154901e3a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.697923] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Created folder: Instances in parent group-v496426. [ 702.698085] env[62383]: DEBUG oslo.service.loopingcall [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 702.698582] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 702.699842] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cc2b6cf-a0c2-49aa-8d5a-8a1ecd4a8edb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.717222] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 702.717222] env[62383]: value = "task-2451390" [ 702.717222] env[62383]: _type = "Task" [ 702.717222] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.723394] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 702.723394] env[62383]: value = "task-2451391" [ 702.723394] env[62383]: _type = "Task" [ 702.723394] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.727084] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451390, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.735738] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451391, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.756347] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7a4e2343-f16c-40a9-8ab1-4647aacae8b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.765858] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.947521] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquiring lock "refresh_cache-2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.947521] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquired lock "refresh_cache-2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.947521] env[62383]: DEBUG nova.network.neutron [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 703.020914] env[62383]: DEBUG nova.compute.utils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 703.023310] env[62383]: DEBUG nova.compute.manager [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 703.024551] env[62383]: DEBUG nova.network.neutron [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 703.100551] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e597c2b9-da08-464a-a03b-f00b4d44c85c tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "330b5e35-3292-4df7-b288-547b158e671a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.580s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.133031] env[62383]: DEBUG nova.policy [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c294a0cc4e6446afabfb754ba2437a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83304cfb5deb443880252c194e249565', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 703.237501] env[62383]: DEBUG nova.compute.manager [req-0cfcfe9e-55c3-4a76-867c-ca171d10bb32 req-3c0295fd-6aad-47b3-94f6-bdfa5c656978 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Received event network-vif-plugged-c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 703.238369] env[62383]: DEBUG oslo_concurrency.lockutils [req-0cfcfe9e-55c3-4a76-867c-ca171d10bb32 req-3c0295fd-6aad-47b3-94f6-bdfa5c656978 service nova] Acquiring lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.238615] env[62383]: DEBUG oslo_concurrency.lockutils [req-0cfcfe9e-55c3-4a76-867c-ca171d10bb32 req-3c0295fd-6aad-47b3-94f6-bdfa5c656978 service nova] Lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.238800] env[62383]: DEBUG oslo_concurrency.lockutils [req-0cfcfe9e-55c3-4a76-867c-ca171d10bb32 req-3c0295fd-6aad-47b3-94f6-bdfa5c656978 service nova] Lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.239070] env[62383]: DEBUG nova.compute.manager [req-0cfcfe9e-55c3-4a76-867c-ca171d10bb32 req-3c0295fd-6aad-47b3-94f6-bdfa5c656978 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] No waiting events found dispatching network-vif-plugged-c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 703.239305] env[62383]: WARNING nova.compute.manager [req-0cfcfe9e-55c3-4a76-867c-ca171d10bb32 req-3c0295fd-6aad-47b3-94f6-bdfa5c656978 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Received unexpected event network-vif-plugged-c23968b2-dbec-433d-8bcc-80644a89ec08 for instance with vm_state building and task_state spawning. [ 703.244617] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451390, 'name': Destroy_Task, 'duration_secs': 0.432167} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.245229] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Destroyed the VM [ 703.245786] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 703.246136] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d3b13a8a-e272-4a79-8b03-482c4821feea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.251976] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451391, 'name': CreateVM_Task, 'duration_secs': 0.506721} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.253321] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 703.253771] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.254190] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.254600] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 703.255353] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fda9e79-3e3a-48bd-8d7f-fa50a33c3a6b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.259774] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 703.259774] env[62383]: value = "task-2451392" [ 703.259774] env[62383]: _type = "Task" [ 703.259774] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.266338] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 703.266338] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52984568-8d53-0d5c-cbb2-d5b5fbffefc7" [ 703.266338] env[62383]: _type = "Task" [ 703.266338] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.278162] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451392, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.282486] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52984568-8d53-0d5c-cbb2-d5b5fbffefc7, 'name': SearchDatastore_Task, 'duration_secs': 0.013725} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.282803] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.283080] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 703.283460] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.285519] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.285519] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 703.285519] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ece72493-d5d0-4d6e-9355-999e992cf6fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.296339] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 703.296896] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 703.297925] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b959e6a3-f038-43e3-bfe0-d26d145426f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.308820] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 703.308820] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5276a02a-c3f0-db99-f86d-cd65bd7edd11" [ 703.308820] env[62383]: _type = "Task" [ 703.308820] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.321610] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5276a02a-c3f0-db99-f86d-cd65bd7edd11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.518603] env[62383]: DEBUG nova.network.neutron [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 703.524838] env[62383]: DEBUG nova.compute.manager [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 703.548307] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "80821717-f961-49c7-8b79-c152edfdfb94" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.548307] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.722140] env[62383]: DEBUG nova.network.neutron [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Updating instance_info_cache with network_info: [{"id": "daaf9854-b852-4045-8380-ae136341958b", "address": "fa:16:3e:89:d7:1f", "network": {"id": "b0fc25d0-9d02-4e96-be67-5f3c47804b3c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1568827313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b99120a31cb46348ef76f6aea1e26cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaaf9854-b8", "ovs_interfaceid": "daaf9854-b852-4045-8380-ae136341958b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.749619] env[62383]: DEBUG nova.network.neutron [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Successfully created port: d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.772388] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451392, 'name': RemoveSnapshot_Task} progress is 36%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.822740] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5276a02a-c3f0-db99-f86d-cd65bd7edd11, 'name': SearchDatastore_Task, 'duration_secs': 0.011986} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.826798] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0d80c09-fa7c-4599-9945-cbb9669873eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.834727] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 703.834727] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5298ff7a-4e33-8669-07f0-b957622483e4" [ 703.834727] env[62383]: _type = "Task" [ 703.834727] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.850021] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5298ff7a-4e33-8669-07f0-b957622483e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.006109] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 704.009435] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a72f19ee-87b3-43df-8623-9e9d47644d7e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.021401] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 704.021401] env[62383]: value = "task-2451393" [ 704.021401] env[62383]: _type = "Task" [ 704.021401] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.041671] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451393, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.057354] env[62383]: DEBUG nova.compute.utils [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 704.231912] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Releasing lock "refresh_cache-2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.232309] env[62383]: DEBUG nova.compute.manager [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Instance network_info: |[{"id": "daaf9854-b852-4045-8380-ae136341958b", "address": "fa:16:3e:89:d7:1f", "network": {"id": "b0fc25d0-9d02-4e96-be67-5f3c47804b3c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1568827313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b99120a31cb46348ef76f6aea1e26cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaaf9854-b8", "ovs_interfaceid": "daaf9854-b852-4045-8380-ae136341958b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 704.233064] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:d7:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15922696-dc08-44ef-97be-0b09a9dfeae8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'daaf9854-b852-4045-8380-ae136341958b', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 704.242497] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Creating folder: Project (3b99120a31cb46348ef76f6aea1e26cd). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 704.242990] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1182278-3c3c-4a7c-94ed-705370333f92 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.260961] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Created folder: Project (3b99120a31cb46348ef76f6aea1e26cd) in parent group-v496304. [ 704.261294] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Creating folder: Instances. Parent ref: group-v496429. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 704.268631] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-907fb628-95a4-4705-b995-f8bfaf8db752 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.277766] env[62383]: DEBUG oslo_vmware.api [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451392, 'name': RemoveSnapshot_Task, 'duration_secs': 0.962883} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.278047] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 704.278323] env[62383]: INFO nova.compute.manager [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Took 16.40 seconds to snapshot the instance on the hypervisor. [ 704.283249] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Created folder: Instances in parent group-v496429. [ 704.283689] env[62383]: DEBUG oslo.service.loopingcall [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 704.285202] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 704.286057] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac976fdc-8df6-4da4-aa64-3af7853710e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.289205] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6723acd9-05b3-4f37-bc7e-0833cd54226a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.311786] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3632a260-2c3f-4465-83e1-7e745180e028 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.317092] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 704.317092] env[62383]: value = "task-2451396" [ 704.317092] env[62383]: _type = "Task" [ 704.317092] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.356808] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ad31da-1ab6-4187-a461-6d2b326f4de9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.363456] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451396, 'name': CreateVM_Task} progress is 15%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.373578] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5298ff7a-4e33-8669-07f0-b957622483e4, 'name': SearchDatastore_Task, 'duration_secs': 0.011941} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.374917] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee429add-2e7f-4f17-a21d-ceef1832f7e1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.379817] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.380206] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a/3810ae49-3b9d-4c5f-b579-8abddc8d6c1a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 704.380613] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c9f15d2-8a4d-4a6e-9b9e-e43a2ff82794 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.396024] env[62383]: DEBUG nova.compute.provider_tree [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.399370] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 704.399370] env[62383]: value = "task-2451397" [ 704.399370] env[62383]: _type = "Task" [ 704.399370] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.413563] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451397, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.533232] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451393, 'name': PowerOffVM_Task, 'duration_secs': 0.29803} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.533590] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 704.534486] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a34c7e-ed8d-4fa6-8177-b2784a07bd82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.556210] env[62383]: DEBUG nova.compute.manager [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 704.559775] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b87faa-fc44-4d96-85dd-1e7cb0eff34a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.563123] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.015s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.832970] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451396, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.901745] env[62383]: DEBUG nova.scheduler.client.report [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 704.919344] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451397, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.329594] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451396, 'name': CreateVM_Task, 'duration_secs': 0.589552} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.329979] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 705.330552] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.330731] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.331090] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 705.331357] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-486621c9-72a2-4308-9cab-9d3c716305ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.336900] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for the task: (returnval){ [ 705.336900] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52384e8d-df78-b6be-dbe9-821f6321fb39" [ 705.336900] env[62383]: _type = "Task" [ 705.336900] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.346102] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52384e8d-df78-b6be-dbe9-821f6321fb39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.391640] env[62383]: DEBUG nova.network.neutron [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Successfully updated port: d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 705.411307] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.890s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.411836] env[62383]: DEBUG nova.compute.manager [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 705.415112] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.306s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.417187] env[62383]: INFO nova.compute.claims [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 705.427666] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451397, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582763} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.427934] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a/3810ae49-3b9d-4c5f-b579-8abddc8d6c1a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 705.428169] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 705.428488] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a85fa7bb-1b1c-415d-b87d-9d73f103439d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.437937] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 705.437937] env[62383]: value = "task-2451398" [ 705.437937] env[62383]: _type = "Task" [ 705.437937] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.449117] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451398, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.628326] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "80821717-f961-49c7-8b79-c152edfdfb94" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.628634] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.628886] env[62383]: INFO nova.compute.manager [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Attaching volume 789417f0-0a0f-41c6-9067-c1e9bc8fd22e to /dev/sdb [ 705.666498] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d54b43-1ab8-4f7f-93a5-839ce667ef5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.675637] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e62fbe8-57ef-4a62-b540-b8be463ae26c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.689097] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 705.689354] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 705.689500] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 705.689872] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 705.689872] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 705.690100] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 705.690266] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 705.690429] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 705.690595] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 705.690759] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 705.690935] env[62383]: DEBUG nova.virt.hardware [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 705.693090] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4117c7-5fb7-40fc-8fe5-4fc3ad845fed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.701659] env[62383]: DEBUG nova.virt.block_device [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Updating existing volume attachment record: bd102dc2-12d6-4385-9305-8777991ccccc {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 705.704445] env[62383]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 705.706158] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f309769-dba8-463c-a5e9-c4a5350720d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.711543] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdea554-f630-46d8-a55b-ba3c0253881e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.718147] env[62383]: DEBUG nova.compute.manager [None req-c2961e4b-9a78-4cf4-853e-e39822cead7c tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Found 1 images (rotation: 2) {{(pid=62383) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 705.722810] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 705.722810] env[62383]: value = "task-2451399" [ 705.722810] env[62383]: _type = "Task" [ 705.722810] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.735391] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0b49-20cb-aa0c-3b61-9e9770b4567f/disk-0.vmdk. 
{{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 705.737069] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-decbf1d5-d640-4bb3-9063-15234bec9974 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.745758] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 705.746170] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 705.746652] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.746652] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.746885] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 705.747620] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53ed47cc-4047-44ff-b1a2-12a62f82462b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.752082] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0b49-20cb-aa0c-3b61-9e9770b4567f/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 705.752199] env[62383]: ERROR oslo_vmware.rw_handles [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0b49-20cb-aa0c-3b61-9e9770b4567f/disk-0.vmdk due to incomplete transfer. 
[ 705.752799] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d618e01d-e692-4b74-a999-24e8aa32c70f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.760568] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 705.760770] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 705.762420] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d6328d9-4f3a-4e98-b747-f2b4684d4038 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.764728] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bf0b49-20cb-aa0c-3b61-9e9770b4567f/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 705.764917] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Uploaded image 5465739d-3e38-4577-aeb1-56c054db252e to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 705.766478] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 705.767011] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c592c1b2-9686-4277-9d26-ffb5a163189a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.773183] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 705.773183] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520b246a-28b0-594d-8175-4dab85a5cf30" [ 705.773183] env[62383]: _type = "Task" [ 705.773183] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.776830] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 705.776830] env[62383]: value = "task-2451400" [ 705.776830] env[62383]: _type = "Task" [ 705.776830] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.785571] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520b246a-28b0-594d-8175-4dab85a5cf30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.791198] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451400, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.848150] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52384e8d-df78-b6be-dbe9-821f6321fb39, 'name': SearchDatastore_Task, 'duration_secs': 0.011424} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.848814] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.848814] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 705.849084] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.849260] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.850618] 
env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 705.850618] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08137b91-8817-4233-9c9e-0083e8c8154b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.859407] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 705.859610] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 705.860406] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d408f3b0-49b2-49d4-ab5b-7e55f4cf29db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.867100] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for the task: (returnval){ [ 705.867100] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52562f70-3a1e-d111-35a0-669b5629ed35" [ 705.867100] env[62383]: _type = "Task" [ 705.867100] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.876150] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52562f70-3a1e-d111-35a0-669b5629ed35, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.894403] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "refresh_cache-67f05a2b-f323-4e4a-ac13-7f4745593be0" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.894729] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "refresh_cache-67f05a2b-f323-4e4a-ac13-7f4745593be0" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.894781] env[62383]: DEBUG nova.network.neutron [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.923102] env[62383]: DEBUG nova.compute.utils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 705.926559] env[62383]: DEBUG nova.compute.manager [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 705.926908] env[62383]: DEBUG nova.network.neutron [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 705.948896] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451398, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111333} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.949192] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 705.950733] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59fcdad-e260-4488-b64a-2788de821400 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.980530] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a/3810ae49-3b9d-4c5f-b579-8abddc8d6c1a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 705.981217] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a486cb4a-c807-41d2-816b-6f0fb7bd6f2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.002979] env[62383]: DEBUG nova.policy [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c19142cf904f4bc9a1b1cf41a1a77f9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e676769a71c843b6966b648ef3525fee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 706.006399] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 706.006399] env[62383]: value = "task-2451402" [ 706.006399] env[62383]: _type = "Task" [ 706.006399] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.015618] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451402, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.291075] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520b246a-28b0-594d-8175-4dab85a5cf30, 'name': SearchDatastore_Task, 'duration_secs': 0.011646} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.294834] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451400, 'name': Destroy_Task, 'duration_secs': 0.345208} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.295082] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10a90b70-5248-4181-94a7-f9ba64a40532 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.297485] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Destroyed the VM [ 706.300147] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 706.300147] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b67fe7a6-fd36-4f34-9b59-00173aa62f01 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.305651] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 706.305651] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b94329-01bc-ddf0-b6cf-e2f43659697b" [ 706.305651] env[62383]: _type = "Task" [ 706.305651] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.310986] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 706.310986] env[62383]: value = "task-2451405" [ 706.310986] env[62383]: _type = "Task" [ 706.310986] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.318041] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b94329-01bc-ddf0-b6cf-e2f43659697b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.324595] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451405, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.378789] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52562f70-3a1e-d111-35a0-669b5629ed35, 'name': SearchDatastore_Task, 'duration_secs': 0.009883} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.379912] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8d75947-42d3-41ff-817a-6fe48ec8e306 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.386409] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for the task: (returnval){ [ 706.386409] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525d6f31-7743-ffa4-b778-0b1e4684092d" [ 706.386409] env[62383]: _type = "Task" [ 706.386409] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.395640] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525d6f31-7743-ffa4-b778-0b1e4684092d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.431939] env[62383]: DEBUG nova.network.neutron [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.436667] env[62383]: DEBUG nova.compute.manager [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 706.531169] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451402, 'name': ReconfigVM_Task, 'duration_secs': 0.332812} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.531963] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a/3810ae49-3b9d-4c5f-b579-8abddc8d6c1a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 706.532629] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d027d2a-e077-4c03-b407-246684453bf6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.546237] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 706.546237] env[62383]: value = "task-2451406" [ 706.546237] env[62383]: _type = "Task" [ 706.546237] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.562418] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451406, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.614818] env[62383]: DEBUG nova.network.neutron [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Updating instance_info_cache with network_info: [{"id": "d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd", "address": "fa:16:3e:ed:c5:82", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2ef4e8d-94", "ovs_interfaceid": "d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.822487] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': 
session[526c6062-9206-ac03-b2da-fd469a7c1551]52b94329-01bc-ddf0-b6cf-e2f43659697b, 'name': SearchDatastore_Task, 'duration_secs': 0.015515} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.823242] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.823523] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. {{(pid=62383) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 706.826558] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7e3b594-1274-4b26-bd67-f17ac1cc4532 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.832752] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451405, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.840077] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 706.840077] env[62383]: value = "task-2451407" [ 706.840077] env[62383]: _type = "Task" [ 706.840077] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.852930] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.903388] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525d6f31-7743-ffa4-b778-0b1e4684092d, 'name': SearchDatastore_Task, 'duration_secs': 0.010468} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.906288] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.906653] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a/2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 706.907217] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-595b4632-7b62-4136-9e00-1bc936de4f09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.919021] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for the task: (returnval){ [ 706.919021] env[62383]: value = "task-2451408" [ 706.919021] env[62383]: _type = "Task" [ 706.919021] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.924326] env[62383]: DEBUG nova.network.neutron [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Successfully created port: 9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 706.933740] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451408, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.975753] env[62383]: DEBUG nova.compute.manager [req-df509d3f-5fcc-4e41-bb0f-aea3afed4344 req-bfa75fb2-361f-42fa-a159-41ba74ce0d02 service nova] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Received event network-vif-deleted-940c815a-1c07-492f-8b17-e4a57d123790 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 707.000619] env[62383]: DEBUG nova.compute.manager [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Received event network-changed-c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 707.000866] env[62383]: DEBUG nova.compute.manager [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Refreshing instance network info cache due to event network-changed-c23968b2-dbec-433d-8bcc-80644a89ec08. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 707.001121] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] Acquiring lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.001277] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] Acquired lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.001465] env[62383]: DEBUG nova.network.neutron [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Refreshing network info cache for port c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 707.060377] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451406, 'name': Rename_Task, 'duration_secs': 0.152493} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.063356] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 707.063910] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42989d7b-e114-4f1b-a629-29301281b88c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.073949] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 707.073949] env[62383]: value = "task-2451409" [ 707.073949] env[62383]: _type = "Task" [ 707.073949] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.079460] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73cbbde-4be9-4a03-be6d-0f33f0d55dc1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.088808] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451409, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.091982] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33d4d84-c0b0-4513-8b49-bcb2c0c137c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.127972] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "refresh_cache-67f05a2b-f323-4e4a-ac13-7f4745593be0" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.128402] env[62383]: DEBUG nova.compute.manager [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Instance network_info: |[{"id": "d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd", "address": "fa:16:3e:ed:c5:82", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2ef4e8d-94", "ovs_interfaceid": "d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 707.129223] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:c5:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '01fe2e08-46f6-4cee-aefd-934461f8077d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.137547] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Creating folder: Project (83304cfb5deb443880252c194e249565). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.138566] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0462cd99-6cbe-4ba9-b609-7b0247ff906b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.142115] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9783e1cb-5c42-4ede-b12a-d49f0fe4fa9f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.152773] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce77ce02-c2a5-4283-8904-2d03d2859fc4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.158691] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Created folder: Project (83304cfb5deb443880252c194e249565) in parent group-v496304. [ 707.159039] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Creating folder: Instances. Parent ref: group-v496434. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.159841] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6063791b-32a0-4de6-82be-485378d7b8ea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.173546] env[62383]: DEBUG nova.compute.provider_tree [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.188473] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Created folder: Instances in parent group-v496434. [ 707.188675] env[62383]: DEBUG oslo.service.loopingcall [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 707.188863] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 707.190477] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91eb2f1a-1e3c-4d3f-b235-97d2cde7c7d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.215137] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.215137] env[62383]: value = "task-2451412" [ 707.215137] env[62383]: _type = "Task" [ 707.215137] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.226447] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451412, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.325308] env[62383]: DEBUG oslo_vmware.api [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451405, 'name': RemoveSnapshot_Task, 'duration_secs': 0.573981} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.325720] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 707.326048] env[62383]: INFO nova.compute.manager [None req-c9e79d34-beb5-43e0-89e0-492fe795fa58 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Took 15.04 seconds to snapshot the instance on the hypervisor. 
[ 707.355449] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451407, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.429973] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451408, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.454130] env[62383]: DEBUG nova.compute.manager [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 707.486374] env[62383]: DEBUG nova.virt.hardware [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 707.486374] env[62383]: DEBUG nova.virt.hardware [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 707.486374] env[62383]: DEBUG nova.virt.hardware [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 707.486810] env[62383]: DEBUG nova.virt.hardware [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 707.486810] env[62383]: DEBUG nova.virt.hardware [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 707.486810] env[62383]: DEBUG nova.virt.hardware [None 
req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 707.489625] env[62383]: DEBUG nova.virt.hardware [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 707.489625] env[62383]: DEBUG nova.virt.hardware [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 707.489625] env[62383]: DEBUG nova.virt.hardware [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 707.489625] env[62383]: DEBUG nova.virt.hardware [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 707.489625] env[62383]: DEBUG nova.virt.hardware [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 707.489990] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5f479f-8edb-4f17-84c5-321e3ef8af8e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.500154] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4add2a-3ad3-44a2-9964-208c888c42fb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.584645] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451409, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.679046] env[62383]: DEBUG nova.scheduler.client.report [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 707.729366] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451412, 'name': CreateVM_Task, 'duration_secs': 0.473005} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.729366] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.729366] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.729366] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.729366] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 707.730030] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c58cbc6f-dfef-494c-9c6a-a6732246504d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.736557] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 707.736557] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525ba35f-71ef-37be-e806-6b610dbcfce6" [ 707.736557] env[62383]: _type = "Task" [ 707.736557] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.751727] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525ba35f-71ef-37be-e806-6b610dbcfce6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.815987] env[62383]: DEBUG nova.network.neutron [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updated VIF entry in instance network info cache for port c23968b2-dbec-433d-8bcc-80644a89ec08. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 707.815987] env[62383]: DEBUG nova.network.neutron [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updating instance_info_cache with network_info: [{"id": "c23968b2-dbec-433d-8bcc-80644a89ec08", "address": "fa:16:3e:3f:6c:37", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23968b2-db", "ovs_interfaceid": "c23968b2-dbec-433d-8bcc-80644a89ec08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.856847] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619612} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.857937] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. 
[ 707.858840] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708fd3a9-21bb-44c1-aa34-960e4de3534e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.887035] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 707.887671] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e536c5f0-6fd4-4d0d-a18f-9fcaca0cfdae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.907204] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 707.907204] env[62383]: value = "task-2451413" [ 707.907204] env[62383]: _type = "Task" [ 707.907204] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.916248] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451413, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.927866] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451408, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.628084} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.928520] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a/2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 707.928520] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 707.928520] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c10bf97-e56a-431d-967d-192bc62947be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.938211] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for the task: (returnval){ [ 707.938211] env[62383]: value = "task-2451414" [ 707.938211] env[62383]: _type = "Task" [ 707.938211] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.947355] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451414, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.084757] env[62383]: DEBUG oslo_vmware.api [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451409, 'name': PowerOnVM_Task, 'duration_secs': 0.644334} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.085455] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 708.085455] env[62383]: INFO nova.compute.manager [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Took 9.05 seconds to spawn the instance on the hypervisor. 
[ 708.085455] env[62383]: DEBUG nova.compute.manager [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 708.086259] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be17ed85-7a9c-4b0a-ab7f-6203fbc5da11 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.186274] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.771s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.186633] env[62383]: DEBUG nova.compute.manager [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 708.189870] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.067s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.191435] env[62383]: INFO nova.compute.claims [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.252355] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525ba35f-71ef-37be-e806-6b610dbcfce6, 'name': SearchDatastore_Task, 'duration_secs': 0.012598} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.253027] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.253183] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.253484] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.253640] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.253908] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.254526] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-445ba0c5-9cbd-4bfa-8382-b3edc93b13a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.267399] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.267707] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 708.268807] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e20c0eb9-074d-4a37-a92c-097ce69c8990 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.275582] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 708.275582] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52dcd4c9-b950-5a84-6cf8-31703871548c" [ 708.275582] env[62383]: _type = "Task" [ 708.275582] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.286154] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52dcd4c9-b950-5a84-6cf8-31703871548c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.320632] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] Releasing lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.320632] env[62383]: DEBUG nova.compute.manager [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Received event network-vif-plugged-daaf9854-b852-4045-8380-ae136341958b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 708.320632] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] Acquiring lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.320885] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] Lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.320929] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] Lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.321228] env[62383]: DEBUG nova.compute.manager [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] No waiting events found dispatching network-vif-plugged-daaf9854-b852-4045-8380-ae136341958b {{(pid=62383) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 708.321506] env[62383]: WARNING nova.compute.manager [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Received unexpected event network-vif-plugged-daaf9854-b852-4045-8380-ae136341958b for instance with vm_state building and task_state spawning. [ 708.321773] env[62383]: DEBUG nova.compute.manager [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Received event network-changed-daaf9854-b852-4045-8380-ae136341958b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 708.322046] env[62383]: DEBUG nova.compute.manager [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Refreshing instance network info cache due to event network-changed-daaf9854-b852-4045-8380-ae136341958b. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 708.322358] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] Acquiring lock "refresh_cache-2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.322616] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] Acquired lock "refresh_cache-2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.322877] env[62383]: DEBUG nova.network.neutron [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Refreshing network info cache for port daaf9854-b852-4045-8380-ae136341958b {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.331248] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.331487] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.419885] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451413, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.449486] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451414, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121174} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.450141] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 708.450975] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90abceed-1387-4822-b6bc-c17c0837b320 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.477052] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a/2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 708.477429] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13ed99f8-afc7-4577-8038-b1e9b2844518 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.502881] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for the task: (returnval){ [ 708.502881] env[62383]: value = "task-2451416" [ 708.502881] env[62383]: _type = "Task" [ 708.502881] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.511054] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451416, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.606959] env[62383]: INFO nova.compute.manager [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Took 56.82 seconds to build instance. 
[ 708.697504] env[62383]: DEBUG nova.compute.utils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 708.701701] env[62383]: DEBUG nova.compute.manager [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 708.702174] env[62383]: DEBUG nova.network.neutron [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 708.760372] env[62383]: DEBUG nova.policy [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65da5eabe26c4f7bba0795e7afeb6df5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '393ba5b56857422eaee92696b56dc23d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 708.787506] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52dcd4c9-b950-5a84-6cf8-31703871548c, 'name': SearchDatastore_Task, 'duration_secs': 0.012588} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.788347] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24de7423-4af8-4171-b577-aae610a7489f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.796272] env[62383]: DEBUG nova.network.neutron [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Successfully updated port: 9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 708.799248] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 708.799248] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528a27fc-1c65-6a99-a2ea-cabd494606bc" [ 708.799248] env[62383]: _type = "Task" [ 708.799248] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.811541] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528a27fc-1c65-6a99-a2ea-cabd494606bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.900081] env[62383]: DEBUG nova.compute.manager [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 708.900796] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e48cc1f-ae49-41b7-a6c4-99d4932c638c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.936437] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451413, 'name': ReconfigVM_Task, 'duration_secs': 0.608943} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.937173] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Reconfigured VM instance instance-00000028 to attach disk [datastore2] ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 708.938207] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1998b90e-0e6f-4cf1-ad07-f9951a1c23b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.972745] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b6089d2-7fd5-487e-875d-28961247440b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.993732] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 708.993732] env[62383]: value = "task-2451417" [ 708.993732] env[62383]: _type = "Task" [ 708.993732] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.013902] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451417, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.020704] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451416, 'name': ReconfigVM_Task, 'duration_secs': 0.504664} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.026812] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a/2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 709.026812] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8334dea-73a5-4e33-9d39-3f2325a46ce6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.033235] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for the task: (returnval){ [ 709.033235] env[62383]: value = "task-2451418" [ 709.033235] env[62383]: _type = "Task" [ 709.033235] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.043329] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451418, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.102760] env[62383]: DEBUG nova.network.neutron [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Successfully created port: 73ec5d4d-c675-4804-a31f-e92bdc8286fd {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 709.109510] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b13e774-3a91-4365-8a0d-cc3f8a9e410f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.095s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.139789] env[62383]: DEBUG nova.network.neutron [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Updated VIF entry in instance network info cache for port daaf9854-b852-4045-8380-ae136341958b. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 709.139789] env[62383]: DEBUG nova.network.neutron [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Updating instance_info_cache with network_info: [{"id": "daaf9854-b852-4045-8380-ae136341958b", "address": "fa:16:3e:89:d7:1f", "network": {"id": "b0fc25d0-9d02-4e96-be67-5f3c47804b3c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1568827313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b99120a31cb46348ef76f6aea1e26cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaaf9854-b8", "ovs_interfaceid": "daaf9854-b852-4045-8380-ae136341958b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.202956] env[62383]: DEBUG nova.compute.manager [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 709.302510] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquiring lock "refresh_cache-8bd05dac-7aa2-44c5-8752-6045c01d213d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.302568] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquired lock "refresh_cache-8bd05dac-7aa2-44c5-8752-6045c01d213d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.302710] env[62383]: DEBUG nova.network.neutron [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 709.321411] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528a27fc-1c65-6a99-a2ea-cabd494606bc, 'name': SearchDatastore_Task, 'duration_secs': 0.012453} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.325215] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.325499] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 67f05a2b-f323-4e4a-ac13-7f4745593be0/67f05a2b-f323-4e4a-ac13-7f4745593be0.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 709.327343] env[62383]: DEBUG nova.compute.manager [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Received event network-vif-plugged-d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 709.330468] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Acquiring lock "67f05a2b-f323-4e4a-ac13-7f4745593be0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.330468] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.330468] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.330468] env[62383]: DEBUG nova.compute.manager [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] No waiting events found dispatching network-vif-plugged-d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 709.330468] env[62383]: WARNING nova.compute.manager [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Received unexpected event network-vif-plugged-d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd for instance with vm_state building and task_state spawning. 
[ 709.330725] env[62383]: DEBUG nova.compute.manager [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Received event network-changed-d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 709.330725] env[62383]: DEBUG nova.compute.manager [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Refreshing instance network info cache due to event network-changed-d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 709.330725] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Acquiring lock "refresh_cache-67f05a2b-f323-4e4a-ac13-7f4745593be0" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.330725] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Acquired lock "refresh_cache-67f05a2b-f323-4e4a-ac13-7f4745593be0" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.330725] env[62383]: DEBUG nova.network.neutron [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Refreshing network info cache for port d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 709.332774] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5256f0ab-7bb2-44b3-b37f-affe2b942305 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.342577] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 709.342577] env[62383]: value = "task-2451419" [ 709.342577] env[62383]: _type = "Task" [ 709.342577] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.356609] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451419, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.435021] env[62383]: INFO nova.compute.manager [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] instance snapshotting [ 709.435021] env[62383]: DEBUG nova.objects.instance [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'flavor' on Instance uuid 8a165d96-f503-4bc5-bff4-e6a85201e137 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 709.512172] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451417, 'name': ReconfigVM_Task, 'duration_secs': 0.277876} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.512172] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 709.512172] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e345d55b-917a-4de7-8e54-ac2625a38d5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.521973] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 709.521973] env[62383]: value = "task-2451420" [ 709.521973] env[62383]: _type = "Task" [ 709.521973] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.531183] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451420, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.543955] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451418, 'name': Rename_Task, 'duration_secs': 0.236717} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.544294] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 709.544577] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e15c78fc-c683-447e-9ed6-ab63302c4a14 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.558147] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for the task: (returnval){ [ 709.558147] env[62383]: value = "task-2451421" [ 709.558147] env[62383]: _type = "Task" [ 709.558147] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.575535] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451421, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.612578] env[62383]: DEBUG nova.compute.manager [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 709.643465] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e494bc7-4ea9-45d2-a181-f530f6e5bcc0 req-4f3ef651-88c6-4e9b-bc42-83419aa76190 service nova] Releasing lock "refresh_cache-2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.851034] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ff135a-a038-4bbd-94ae-ba426bcf896a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.857651] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451419, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.864428] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efa3f96-042f-4c9e-b5e7-c6dfb9d93b9e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.903021] env[62383]: DEBUG nova.network.neutron [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.905475] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2ff8c9-89da-4f33-ab2a-0086b356d719 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.915829] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1dd5ddc-e113-4e4a-ab4f-fffd1f4a030e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.937025] env[62383]: DEBUG nova.compute.provider_tree [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 709.940070] env[62383]: DEBUG nova.compute.manager [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 709.941184] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172ce5cd-bb98-4a05-a1f6-be11d9796405 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.944534] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63001eeb-d149-49de-ac37-6d1ed6b70dec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.974830] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a91dc88-93e4-47bb-8959-730008655f6f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.034155] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451420, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.069344] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451421, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.101653] env[62383]: DEBUG nova.network.neutron [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Updated VIF entry in instance network info cache for port d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 710.102042] env[62383]: DEBUG nova.network.neutron [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Updating instance_info_cache with network_info: [{"id": "d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd", "address": "fa:16:3e:ed:c5:82", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2ef4e8d-94", "ovs_interfaceid": "d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.139320] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.217154] env[62383]: DEBUG nova.compute.manager [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 710.223048] env[62383]: DEBUG nova.network.neutron [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Updating instance_info_cache with network_info: [{"id": "9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4", "address": "fa:16:3e:15:4a:e6", "network": {"id": "024ab25b-2360-4443-a441-3172fd2cf74b", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-183456731-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e676769a71c843b6966b648ef3525fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff3b35c-c2", "ovs_interfaceid": "9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.247224] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 710.247513] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.247721] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 710.247937] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 
tempest-InstanceActionsTestJSON-695529899-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.248084] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 710.248586] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 710.249035] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 710.249164] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 710.249337] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 710.249714] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 710.250013] env[62383]: DEBUG nova.virt.hardware [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 710.251392] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e816fd2c-1ba2-4e57-827b-8bbde0b3be07 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.258426] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Volume attach. 
Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 710.258583] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496433', 'volume_id': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'name': 'volume-789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '80821717-f961-49c7-8b79-c152edfdfb94', 'attached_at': '', 'detached_at': '', 'volume_id': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'serial': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 710.260232] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49b2eec-420b-484c-82f7-b4a050ef2f4c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.269540] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3d08da-1131-447f-bf4a-f4c2735a7e68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.286115] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93072d9-5859-48f8-8a99-cf1e02d422cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.328691] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] volume-789417f0-0a0f-41c6-9067-c1e9bc8fd22e/volume-789417f0-0a0f-41c6-9067-c1e9bc8fd22e.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.329107] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b408599-b4f6-43f1-9d2d-127763d887b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.350644] env[62383]: DEBUG oslo_vmware.api [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 710.350644] env[62383]: value = "task-2451422" [ 710.350644] env[62383]: _type = "Task" [ 710.350644] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.354287] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451419, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55771} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.358000] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 67f05a2b-f323-4e4a-ac13-7f4745593be0/67f05a2b-f323-4e4a-ac13-7f4745593be0.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 710.358273] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 710.358537] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7719daf-1f56-47e1-8b18-023098fdad2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.366378] env[62383]: DEBUG oslo_vmware.api [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451422, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.367839] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 710.367839] env[62383]: value = "task-2451423" [ 710.367839] env[62383]: _type = "Task" [ 710.367839] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.380140] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451423, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.461873] env[62383]: ERROR nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [req-9fbe94b8-ca59-4617-93c5-7f97174aa111] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9fbe94b8-ca59-4617-93c5-7f97174aa111"}]} [ 710.475139] env[62383]: INFO nova.compute.manager [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] instance snapshotting [ 710.477595] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 710.480677] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48cd3cb7-b1f7-4e5c-bff9-5727e7814371 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.502719] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 710.504054] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 710.504703] env[62383]: DEBUG nova.compute.provider_tree [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 710.507186] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-13922239-be51-444a-bf77-7b23174549ff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.509250] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fccf1dfc-d406-46e1-b249-a21a513a33ed {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.523281] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 710.523281] env[62383]: value = "task-2451424" [ 710.523281] env[62383]: _type = "Task" [ 710.523281] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.531726] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 710.550940] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451424, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.551354] env[62383]: DEBUG oslo_vmware.api [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451420, 'name': PowerOnVM_Task, 'duration_secs': 0.699872} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.553922] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 710.556854] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 710.560186] env[62383]: DEBUG nova.compute.manager [None req-2328c308-9704-4188-981a-4a13d1f5370c tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 710.564475] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a1ad6b-eabb-464a-b0c3-fc3a2073404e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.570267] env[62383]: DEBUG oslo_vmware.rw_handles [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5228533e-c36d-c0c5-2bf4-7dc125f7de75/disk-0.vmdk. 
{{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 710.570828] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2492dfe-980f-4fa2-a1ea-21e743ad9aa4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.580398] env[62383]: DEBUG oslo_vmware.api [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451421, 'name': PowerOnVM_Task, 'duration_secs': 0.705543} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.583748] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 710.583748] env[62383]: INFO nova.compute.manager [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Took 8.76 seconds to spawn the instance on the hypervisor. [ 710.584374] env[62383]: DEBUG nova.compute.manager [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 710.584374] env[62383]: DEBUG oslo_vmware.rw_handles [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5228533e-c36d-c0c5-2bf4-7dc125f7de75/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 710.584508] env[62383]: ERROR oslo_vmware.rw_handles [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5228533e-c36d-c0c5-2bf4-7dc125f7de75/disk-0.vmdk due to incomplete transfer. 
[ 710.587718] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f2c9f4-1bdb-4fdc-b9e3-8968911e050c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.590249] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2422f60e-2502-4767-af14-e782a2e103da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.606889] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Releasing lock "refresh_cache-67f05a2b-f323-4e4a-ac13-7f4745593be0" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.609914] env[62383]: DEBUG nova.compute.manager [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Received event network-vif-plugged-9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 710.609914] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Acquiring lock "8bd05dac-7aa2-44c5-8752-6045c01d213d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.609914] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Lock "8bd05dac-7aa2-44c5-8752-6045c01d213d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.609914] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Lock "8bd05dac-7aa2-44c5-8752-6045c01d213d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.609914] env[62383]: DEBUG nova.compute.manager [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] No waiting events found dispatching network-vif-plugged-9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 710.610166] env[62383]: WARNING nova.compute.manager [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Received unexpected event network-vif-plugged-9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4 for instance with vm_state building and task_state spawning. 
[ 710.610166] env[62383]: DEBUG nova.compute.manager [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Received event network-changed-9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 710.610166] env[62383]: DEBUG nova.compute.manager [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Refreshing instance network info cache due to event network-changed-9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 710.610166] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Acquiring lock "refresh_cache-8bd05dac-7aa2-44c5-8752-6045c01d213d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.610166] env[62383]: DEBUG oslo_vmware.rw_handles [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5228533e-c36d-c0c5-2bf4-7dc125f7de75/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 710.610322] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Uploaded image 35aacbb9-9f29-4179-83e7-370081c601df to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 710.612546] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 710.612652] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1c2ffdff-f735-4ae3-a635-131ae0a62fa6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.621211] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 710.621211] env[62383]: value = "task-2451425" [ 710.621211] env[62383]: _type = "Task" [ 710.621211] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.636965] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451425, 'name': Destroy_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.707492] env[62383]: DEBUG nova.network.neutron [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Successfully updated port: 73ec5d4d-c675-4804-a31f-e92bdc8286fd {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 710.733186] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Releasing lock "refresh_cache-8bd05dac-7aa2-44c5-8752-6045c01d213d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.733936] env[62383]: DEBUG nova.compute.manager [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Instance network_info: |[{"id": "9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4", "address": "fa:16:3e:15:4a:e6", "network": {"id": "024ab25b-2360-4443-a441-3172fd2cf74b", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-183456731-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e676769a71c843b6966b648ef3525fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff3b35c-c2", "ovs_interfaceid": "9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 710.734140] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Acquired lock "refresh_cache-8bd05dac-7aa2-44c5-8752-6045c01d213d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.734331] env[62383]: DEBUG nova.network.neutron [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Refreshing network info cache for port 9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 710.735467] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:4a:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'0467beaa-08c6-44d6-b8a2-e9c609c21ff4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 710.743376] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Creating folder: Project (e676769a71c843b6966b648ef3525fee). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 710.748894] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38673e1e-1abd-4cc3-8956-360ea9e9733e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.762478] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Created folder: Project (e676769a71c843b6966b648ef3525fee) in parent group-v496304. [ 710.762819] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Creating folder: Instances. Parent ref: group-v496437. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 710.763119] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab66e649-7047-421e-8324-123bc1c60145 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.779540] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Created folder: Instances in parent group-v496437. [ 710.779970] env[62383]: DEBUG oslo.service.loopingcall [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 710.780208] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 710.780431] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-038c7922-0a54-4ff4-87a4-4719bf0d66a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.808285] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 710.808285] env[62383]: value = "task-2451428" [ 710.808285] env[62383]: _type = "Task" [ 710.808285] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.818598] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451428, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.864644] env[62383]: DEBUG oslo_vmware.api [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451422, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.879703] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451423, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080022} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.879766] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.880837] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57bed66a-6747-435d-8b64-4aa6231a34f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.912691] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 67f05a2b-f323-4e4a-ac13-7f4745593be0/67f05a2b-f323-4e4a-ac13-7f4745593be0.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.918096] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e531523e-94cd-4ce9-b98d-7a99bc75146c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.941077] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 710.941077] env[62383]: value = "task-2451429" [ 710.941077] env[62383]: _type = "Task" [ 710.941077] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.949238] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451429, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.025521] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 711.025997] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2d1841fc-86c7-402a-8457-5ded45ca92be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.046754] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451424, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.049052] env[62383]: DEBUG nova.network.neutron [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Updated VIF entry in instance network info cache for port 9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 711.049442] env[62383]: DEBUG nova.network.neutron [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Updating instance_info_cache with network_info: [{"id": "9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4", "address": "fa:16:3e:15:4a:e6", "network": {"id": "024ab25b-2360-4443-a441-3172fd2cf74b", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-183456731-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e676769a71c843b6966b648ef3525fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff3b35c-c2", "ovs_interfaceid": "9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.050935] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 711.050935] env[62383]: value = "task-2451430" [ 711.050935] env[62383]: _type = "Task" [ 711.050935] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.068302] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451430, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.073509] env[62383]: DEBUG nova.compute.manager [req-8d6b230c-e4c2-4ada-82ea-26ac84b9aa22 req-b1df49d2-a69c-405f-bc4c-e8b8942280b7 service nova] [instance: 583138d1-f928-4e33-a443-11c627203c44] Received event network-vif-plugged-73ec5d4d-c675-4804-a31f-e92bdc8286fd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 711.073677] env[62383]: DEBUG oslo_concurrency.lockutils [req-8d6b230c-e4c2-4ada-82ea-26ac84b9aa22 req-b1df49d2-a69c-405f-bc4c-e8b8942280b7 service nova] Acquiring lock "583138d1-f928-4e33-a443-11c627203c44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.073913] env[62383]: DEBUG oslo_concurrency.lockutils [req-8d6b230c-e4c2-4ada-82ea-26ac84b9aa22 req-b1df49d2-a69c-405f-bc4c-e8b8942280b7 service nova] Lock "583138d1-f928-4e33-a443-11c627203c44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.074093] env[62383]: DEBUG oslo_concurrency.lockutils [req-8d6b230c-e4c2-4ada-82ea-26ac84b9aa22 req-b1df49d2-a69c-405f-bc4c-e8b8942280b7 service nova] Lock "583138d1-f928-4e33-a443-11c627203c44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.074696] env[62383]: DEBUG nova.compute.manager [req-8d6b230c-e4c2-4ada-82ea-26ac84b9aa22 req-b1df49d2-a69c-405f-bc4c-e8b8942280b7 service nova] [instance: 583138d1-f928-4e33-a443-11c627203c44] No waiting events found dispatching network-vif-plugged-73ec5d4d-c675-4804-a31f-e92bdc8286fd {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 711.074696] env[62383]: WARNING nova.compute.manager [req-8d6b230c-e4c2-4ada-82ea-26ac84b9aa22 req-b1df49d2-a69c-405f-bc4c-e8b8942280b7 service nova] [instance: 583138d1-f928-4e33-a443-11c627203c44] Received unexpected event network-vif-plugged-73ec5d4d-c675-4804-a31f-e92bdc8286fd for instance with vm_state building and task_state spawning. [ 711.123176] env[62383]: INFO nova.compute.manager [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Took 52.17 seconds to build instance. [ 711.139123] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451425, 'name': Destroy_Task} progress is 33%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.211055] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquiring lock "refresh_cache-583138d1-f928-4e33-a443-11c627203c44" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.211215] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquired lock "refresh_cache-583138d1-f928-4e33-a443-11c627203c44" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.211361] env[62383]: DEBUG nova.network.neutron [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.265809] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f04626-6da2-4b77-971c-6c6823c1a2a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.276598] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bec380d-ba95-469f-8ae8-f58c4492afd9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.318778] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef122ff5-9c3a-448c-8f08-4a6837d71b61 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.328049] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451428, 'name': CreateVM_Task, 'duration_secs': 0.453598} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.330404] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 711.331339] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.331513] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.331848] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 711.333807] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732e76ff-dd7f-415f-b6b2-e0e8c315f1ee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.337835] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5e02bcf-36f3-4e0e-a610-61727c74e1cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.344035] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for the task: (returnval){ [ 711.344035] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d42b40-f8be-f7e4-83e6-a5bb4a059935" [ 711.344035] env[62383]: _type = "Task" [ 711.344035] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.353131] env[62383]: DEBUG nova.compute.provider_tree [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 711.368028] env[62383]: DEBUG oslo_vmware.api [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451422, 'name': ReconfigVM_Task, 'duration_secs': 0.720628} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.371634] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Reconfigured VM instance instance-00000011 to attach disk [datastore2] volume-789417f0-0a0f-41c6-9067-c1e9bc8fd22e/volume-789417f0-0a0f-41c6-9067-c1e9bc8fd22e.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.377016] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d42b40-f8be-f7e4-83e6-a5bb4a059935, 'name': SearchDatastore_Task, 'duration_secs': 0.012992} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.377261] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-154d982a-2f69-4998-9af3-353abdf693ac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.387700] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.387963] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 711.388182] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.388330] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.388510] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 711.389163] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c4d2f86-19fb-4555-a946-4cb0ab186164 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.399234] env[62383]: DEBUG oslo_vmware.api [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 711.399234] env[62383]: value = "task-2451431" [ 711.399234] env[62383]: _type = "Task" [ 711.399234] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.399906] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 711.400352] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 711.405631] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a4a9e9e-7055-4b12-b72b-9567df8e3423 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.413786] env[62383]: DEBUG oslo_vmware.api [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451431, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.416032] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for the task: (returnval){ [ 711.416032] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52218d93-3d24-166b-9705-250dd667db71" [ 711.416032] env[62383]: _type = "Task" [ 711.416032] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.430026] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52218d93-3d24-166b-9705-250dd667db71, 'name': SearchDatastore_Task, 'duration_secs': 0.00996} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.430026] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef13b4c2-ea74-4e6d-9559-efb3f28150ba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.433580] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for the task: (returnval){ [ 711.433580] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c1f05b-0145-f5ce-4e5f-101368800c4f" [ 711.433580] env[62383]: _type = "Task" [ 711.433580] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.444604] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c1f05b-0145-f5ce-4e5f-101368800c4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.454329] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451429, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.540146] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451424, 'name': CreateSnapshot_Task, 'duration_secs': 0.816196} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.540338] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 711.541398] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf79822f-ebc5-4376-b657-fb7256c39bf0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.557375] env[62383]: DEBUG oslo_concurrency.lockutils [req-f78f41e9-3953-4c93-8133-a2ae51a559bc req-1a766c01-7ebd-4668-8623-5ea430658e91 service nova] Releasing lock "refresh_cache-8bd05dac-7aa2-44c5-8752-6045c01d213d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.565046] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451430, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.629557] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2116d7b-42c0-4e78-b6c9-e3fbe23b2b78 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.973s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.637051] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451425, 'name': Destroy_Task, 'duration_secs': 0.549223} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.637315] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Destroyed the VM [ 711.637556] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 711.637828] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9e8b2a75-f06c-45b5-8489-9e3bf06cf439 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.645781] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 711.645781] env[62383]: value = "task-2451432" [ 711.645781] env[62383]: _type = "Task" [ 711.645781] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.655976] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451432, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.755735] env[62383]: DEBUG nova.network.neutron [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.878759] env[62383]: ERROR nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [req-14ed43fa-2418-4224-a4d6-c86fdeeeb559] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-14ed43fa-2418-4224-a4d6-c86fdeeeb559"}]} [ 711.899493] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 711.913618] env[62383]: DEBUG oslo_vmware.api [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451431, 'name': ReconfigVM_Task, 'duration_secs': 0.159451} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.914088] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496433', 'volume_id': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'name': 'volume-789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '80821717-f961-49c7-8b79-c152edfdfb94', 'attached_at': '', 'detached_at': '', 'volume_id': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'serial': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 711.921381] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 711.921646] env[62383]: DEBUG nova.compute.provider_tree [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 711.941753] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 
tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 711.951896] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c1f05b-0145-f5ce-4e5f-101368800c4f, 'name': SearchDatastore_Task, 'duration_secs': 0.014791} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.952560] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.953023] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 8bd05dac-7aa2-44c5-8752-6045c01d213d/8bd05dac-7aa2-44c5-8752-6045c01d213d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 711.953532] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad63fcab-e882-4a4a-817c-da6b56bc07b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.960143] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451429, 'name': ReconfigVM_Task, 'duration_secs': 0.655447} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.960584] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 711.964230] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 67f05a2b-f323-4e4a-ac13-7f4745593be0/67f05a2b-f323-4e4a-ac13-7f4745593be0.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.964713] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0cd17e3c-d8e6-4ad5-b781-a1796f0734aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.969851] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for the task: (returnval){ [ 711.969851] env[62383]: value = "task-2451433" [ 711.969851] env[62383]: _type = "Task" [ 711.969851] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.974302] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 711.974302] env[62383]: value = "task-2451434" [ 711.974302] env[62383]: _type = "Task" [ 711.974302] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.983114] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451433, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.989125] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451434, 'name': Rename_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.065657] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 712.065933] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-62786a72-0b7f-4961-8bf1-25138845ba65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.085230] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.085538] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.095664] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451430, 'name': CreateSnapshot_Task, 'duration_secs': 0.899885} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.095893] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 712.095893] env[62383]: value = "task-2451435" [ 712.095893] env[62383]: _type = "Task" [ 712.095893] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.096142] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 712.097063] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3085f6a4-b275-460e-85c5-e3a2c5a0f1db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.117088] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451435, 'name': CloneVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.132807] env[62383]: DEBUG nova.compute.manager [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 712.164124] env[62383]: DEBUG oslo_vmware.api [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451432, 'name': RemoveSnapshot_Task, 'duration_secs': 0.351227} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.164465] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 712.165106] env[62383]: INFO nova.compute.manager [None req-d50f4928-2987-409a-b0ae-c41334e0f559 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Took 17.66 seconds to snapshot the instance on the hypervisor. [ 712.172881] env[62383]: DEBUG nova.network.neutron [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Updating instance_info_cache with network_info: [{"id": "73ec5d4d-c675-4804-a31f-e92bdc8286fd", "address": "fa:16:3e:69:c0:d4", "network": {"id": "a6cf92ab-f9be-4021-bd8d-9e4b246d8dc1", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1324527743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "393ba5b56857422eaee92696b56dc23d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73ec5d4d-c6", "ovs_interfaceid": "73ec5d4d-c675-4804-a31f-e92bdc8286fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.326530] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "872ac212-9f29-426d-94c7-e1bf73aebd94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.326814] env[62383]: 
DEBUG oslo_concurrency.lockutils [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "872ac212-9f29-426d-94c7-e1bf73aebd94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.327135] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "872ac212-9f29-426d-94c7-e1bf73aebd94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.327472] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "872ac212-9f29-426d-94c7-e1bf73aebd94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.328604] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "872ac212-9f29-426d-94c7-e1bf73aebd94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.335114] env[62383]: INFO nova.compute.manager [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Terminating instance [ 712.489352] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451433, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.494467] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451434, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.619895] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451435, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.628963] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 712.632016] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-74d4165f-b443-471b-b1c7-d2f5e24e846d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.642039] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 712.642039] env[62383]: value = "task-2451436" [ 712.642039] env[62383]: _type = "Task" [ 712.642039] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.654785] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451436, 'name': CloneVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.660719] env[62383]: DEBUG oslo_concurrency.lockutils [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.680932] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Releasing lock "refresh_cache-583138d1-f928-4e33-a443-11c627203c44" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.682166] env[62383]: DEBUG nova.compute.manager [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Instance network_info: |[{"id": "73ec5d4d-c675-4804-a31f-e92bdc8286fd", "address": "fa:16:3e:69:c0:d4", "network": {"id": "a6cf92ab-f9be-4021-bd8d-9e4b246d8dc1", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1324527743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "393ba5b56857422eaee92696b56dc23d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap73ec5d4d-c6", "ovs_interfaceid": "73ec5d4d-c675-4804-a31f-e92bdc8286fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 712.683153] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:c0:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92233552-2c0c-416e-9bf3-bfcca8eda2dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73ec5d4d-c675-4804-a31f-e92bdc8286fd', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 712.692106] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Creating folder: Project (393ba5b56857422eaee92696b56dc23d). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.692992] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f03424c2-4090-44a1-904a-edfa0e865db0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.707239] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Created folder: Project (393ba5b56857422eaee92696b56dc23d) in parent group-v496304. [ 712.707458] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Creating folder: Instances. Parent ref: group-v496444. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.707754] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee77970b-267a-4570-a9c4-596e2914d096 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.719238] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Created folder: Instances in parent group-v496444. [ 712.719573] env[62383]: DEBUG oslo.service.loopingcall [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 712.719808] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 583138d1-f928-4e33-a443-11c627203c44] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 712.720076] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d412c6b3-6d59-4f14-af9a-bd8772c4122d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.737999] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.738254] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.738449] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.738644] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.738819] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.741215] env[62383]: INFO nova.compute.manager [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Terminating instance [ 712.746099] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 712.746099] env[62383]: value = "task-2451439" [ 712.746099] env[62383]: _type = "Task" [ 712.746099] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.752948] env[62383]: DEBUG nova.compute.manager [req-400d7249-b3ff-4964-a470-2b27f5b90088 req-adbabf9a-77c5-4b33-a8b4-acb3177e4b6f service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Received event network-changed-daaf9854-b852-4045-8380-ae136341958b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 712.753166] env[62383]: DEBUG nova.compute.manager [req-400d7249-b3ff-4964-a470-2b27f5b90088 req-adbabf9a-77c5-4b33-a8b4-acb3177e4b6f service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Refreshing instance network info cache due to event network-changed-daaf9854-b852-4045-8380-ae136341958b. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 712.753383] env[62383]: DEBUG oslo_concurrency.lockutils [req-400d7249-b3ff-4964-a470-2b27f5b90088 req-adbabf9a-77c5-4b33-a8b4-acb3177e4b6f service nova] Acquiring lock "refresh_cache-2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.753523] env[62383]: DEBUG oslo_concurrency.lockutils [req-400d7249-b3ff-4964-a470-2b27f5b90088 req-adbabf9a-77c5-4b33-a8b4-acb3177e4b6f service nova] Acquired lock "refresh_cache-2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.753707] env[62383]: DEBUG nova.network.neutron [req-400d7249-b3ff-4964-a470-2b27f5b90088 req-adbabf9a-77c5-4b33-a8b4-acb3177e4b6f service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Refreshing network info cache for port daaf9854-b852-4045-8380-ae136341958b {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 712.757186] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c35897-ffc3-43ba-b872-62b751abad8a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.764109] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451439, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.770514] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4830ec7e-d1c9-423e-b48f-44e697574a9f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.809262] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88a23ee-e7a6-4e69-924b-5fb0a35f6deb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.819452] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d05498-6983-4619-8c8d-4b7d30da991f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.849185] env[62383]: DEBUG nova.compute.manager [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 712.849185] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 712.851523] env[62383]: DEBUG nova.compute.provider_tree [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 712.851523] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8406ce09-ec4b-40d8-a626-d1e092545f02 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.862673] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 712.862673] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a26687e0-3a61-4d97-85fd-33cacaf741ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.870667] env[62383]: DEBUG oslo_vmware.api [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 712.870667] env[62383]: value = "task-2451440" [ 712.870667] env[62383]: _type = "Task" [ 712.870667] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.880754] env[62383]: DEBUG oslo_vmware.api [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451440, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.924155] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "a27fcace-4fb3-48fb-946d-b8057f6ee601" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.924155] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "a27fcace-4fb3-48fb-946d-b8057f6ee601" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.970928] env[62383]: DEBUG nova.objects.instance [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lazy-loading 'flavor' on Instance uuid 80821717-f961-49c7-8b79-c152edfdfb94 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 712.992291] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451433, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524429} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.996073] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 8bd05dac-7aa2-44c5-8752-6045c01d213d/8bd05dac-7aa2-44c5-8752-6045c01d213d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 712.996423] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 712.997704] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451434, 'name': Rename_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.997973] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf828be4-b76b-4bd5-8076-4f6294b78660 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.008405] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for the task: (returnval){ [ 713.008405] env[62383]: value = "task-2451441" [ 713.008405] env[62383]: _type = "Task" [ 713.008405] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.021681] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451441, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.083264] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "0c01a974-2318-461b-965f-ba4932e3bea1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.083666] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "0c01a974-2318-461b-965f-ba4932e3bea1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.083914] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "0c01a974-2318-461b-965f-ba4932e3bea1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.084130] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "0c01a974-2318-461b-965f-ba4932e3bea1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.084301] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "0c01a974-2318-461b-965f-ba4932e3bea1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.086516] env[62383]: INFO nova.compute.manager [None req-c936438c-aef3-4882-9594-0e3872b0b5ed 
tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Terminating instance [ 713.113561] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451435, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.155641] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451436, 'name': CloneVM_Task} progress is 93%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.246464] env[62383]: DEBUG nova.compute.manager [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 713.246900] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.247947] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc708f42-00fc-4180-bd3e-24c28bf20dd4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.263722] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 713.267640] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26acc116-55d4-4337-8250-39069bdaa04d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.269862] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451439, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.277484] env[62383]: DEBUG oslo_vmware.api [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 713.277484] env[62383]: value = "task-2451442" [ 713.277484] env[62383]: _type = "Task" [ 713.277484] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.291177] env[62383]: DEBUG oslo_vmware.api [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451442, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.378755] env[62383]: ERROR nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [req-705b3ecb-5071-4ac9-9dca-a73a2658a550] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-705b3ecb-5071-4ac9-9dca-a73a2658a550"}]} [ 713.384603] env[62383]: DEBUG oslo_vmware.api [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451440, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.399534] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 713.417907] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 713.418121] env[62383]: DEBUG nova.compute.provider_tree [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 713.434195] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Refreshing 
aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 713.439334] env[62383]: DEBUG nova.compute.manager [req-481f3557-b112-43ec-9f9e-035e5d577999 req-45f8664c-aa1f-4293-8c19-b2f16d623398 service nova] [instance: 583138d1-f928-4e33-a443-11c627203c44] Received event network-changed-73ec5d4d-c675-4804-a31f-e92bdc8286fd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 713.439742] env[62383]: DEBUG nova.compute.manager [req-481f3557-b112-43ec-9f9e-035e5d577999 req-45f8664c-aa1f-4293-8c19-b2f16d623398 service nova] [instance: 583138d1-f928-4e33-a443-11c627203c44] Refreshing instance network info cache due to event network-changed-73ec5d4d-c675-4804-a31f-e92bdc8286fd. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 713.439742] env[62383]: DEBUG oslo_concurrency.lockutils [req-481f3557-b112-43ec-9f9e-035e5d577999 req-45f8664c-aa1f-4293-8c19-b2f16d623398 service nova] Acquiring lock "refresh_cache-583138d1-f928-4e33-a443-11c627203c44" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.439932] env[62383]: DEBUG oslo_concurrency.lockutils [req-481f3557-b112-43ec-9f9e-035e5d577999 req-45f8664c-aa1f-4293-8c19-b2f16d623398 service nova] Acquired lock "refresh_cache-583138d1-f928-4e33-a443-11c627203c44" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.440223] env[62383]: DEBUG nova.network.neutron [req-481f3557-b112-43ec-9f9e-035e5d577999 req-45f8664c-aa1f-4293-8c19-b2f16d623398 service nova] [instance: 583138d1-f928-4e33-a443-11c627203c44] Refreshing network info cache for port 73ec5d4d-c675-4804-a31f-e92bdc8286fd {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 713.457382] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 713.491719] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb11331b-1a1e-4b14-a3d8-e30e2a4c8b9b tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.863s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.501701] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451434, 'name': Rename_Task, 'duration_secs': 1.379215} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.501701] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 713.501989] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6cff4d24-58b8-4d8e-b932-309f7d038502 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.517670] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 713.517670] env[62383]: value = "task-2451443" [ 713.517670] env[62383]: _type = "Task" [ 713.517670] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.529324] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451441, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.256088} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.530109] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 713.530918] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c04eb04-a307-4b99-b9e6-c5989cb4fe93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.542348] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451443, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.544853] env[62383]: DEBUG nova.network.neutron [req-400d7249-b3ff-4964-a470-2b27f5b90088 req-adbabf9a-77c5-4b33-a8b4-acb3177e4b6f service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Updated VIF entry in instance network info cache for port daaf9854-b852-4045-8380-ae136341958b. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 713.545301] env[62383]: DEBUG nova.network.neutron [req-400d7249-b3ff-4964-a470-2b27f5b90088 req-adbabf9a-77c5-4b33-a8b4-acb3177e4b6f service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Updating instance_info_cache with network_info: [{"id": "daaf9854-b852-4045-8380-ae136341958b", "address": "fa:16:3e:89:d7:1f", "network": {"id": "b0fc25d0-9d02-4e96-be67-5f3c47804b3c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1568827313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3b99120a31cb46348ef76f6aea1e26cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15922696-dc08-44ef-97be-0b09a9dfeae8", "external-id": "nsx-vlan-transportzone-791", "segmentation_id": 791, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdaaf9854-b8", "ovs_interfaceid": "daaf9854-b852-4045-8380-ae136341958b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.567367] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 8bd05dac-7aa2-44c5-8752-6045c01d213d/8bd05dac-7aa2-44c5-8752-6045c01d213d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 713.575028] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40b2f028-5f4c-4f64-9b89-64990b7cc531 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.590399] env[62383]: DEBUG nova.compute.manager [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 713.590597] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.592106] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3307e3b6-af34-4070-9e3d-240eb7bb3901 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.598268] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for the task: (returnval){ [ 713.598268] env[62383]: value = "task-2451444" [ 713.598268] env[62383]: _type = "Task" [ 713.598268] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.608273] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 713.611849] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e5a1c78-0d80-484d-a417-42dd6d12b01a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.620748] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451444, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.626472] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451435, 'name': CloneVM_Task} progress is 95%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.660960] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451436, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.711308] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 713.711670] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 713.712511] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleting the datastore file [datastore2] 0c01a974-2318-461b-965f-ba4932e3bea1 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.712511] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8dd90b89-627a-4237-a74a-b904d01b850b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.720176] env[62383]: DEBUG oslo_vmware.api [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 713.720176] env[62383]: value = "task-2451446" [ 713.720176] env[62383]: _type = "Task" [ 713.720176] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.733468] env[62383]: DEBUG oslo_vmware.api [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451446, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.756993] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451439, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.794198] env[62383]: DEBUG oslo_vmware.api [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451442, 'name': PowerOffVM_Task, 'duration_secs': 0.319314} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.794305] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 713.794399] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 713.797117] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f3d15f2-4c40-444a-ba1c-ee36b51f3982 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.867220] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 713.867466] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 713.867724] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Deleting the datastore file [datastore2] 6b5daa17-ad4a-4b30-a1fe-083a1a238667 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.868087] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b487b08b-9352-4c54-90a6-cdf1c32ec506 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.876994] env[62383]: DEBUG oslo_vmware.api [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 713.876994] env[62383]: value = "task-2451448" [ 713.876994] env[62383]: _type = "Task" [ 713.876994] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.886502] env[62383]: DEBUG oslo_vmware.api [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451440, 'name': PowerOffVM_Task, 'duration_secs': 0.520921} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.887290] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 713.887451] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 713.887720] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e9ce39b-a80c-4123-961d-de1ea8411ec1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.892488] env[62383]: DEBUG oslo_vmware.api [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451448, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.982696] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 713.982932] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 713.983155] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Deleting the datastore file [datastore2] 872ac212-9f29-426d-94c7-e1bf73aebd94 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.983437] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a367c910-17e8-4f9c-aaba-c8bae5171aae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.995146] env[62383]: DEBUG oslo_vmware.api [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for the task: (returnval){ [ 713.995146] env[62383]: value = "task-2451450" [ 713.995146] env[62383]: _type = "Task" [ 713.995146] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.008249] env[62383]: DEBUG oslo_vmware.api [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451450, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.027824] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.028230] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.042452] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451443, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.059445] env[62383]: DEBUG oslo_concurrency.lockutils [req-400d7249-b3ff-4964-a470-2b27f5b90088 req-adbabf9a-77c5-4b33-a8b4-acb3177e4b6f service nova] Releasing lock "refresh_cache-2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.089936] env[62383]: DEBUG oslo_concurrency.lockutils [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "80821717-f961-49c7-8b79-c152edfdfb94" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.091137] env[62383]: DEBUG oslo_concurrency.lockutils [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.117603] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451444, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.129236] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451435, 'name': CloneVM_Task, 'duration_secs': 1.660932} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.133945] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Created linked-clone VM from snapshot [ 714.135638] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0622dcb-bd89-4fe6-ad6f-5253f38702c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.146296] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Uploading image 2c35654f-cf3e-45f8-bb13-ed76aec806dc {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 714.163713] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451436, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.174460] env[62383]: DEBUG oslo_vmware.rw_handles [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 714.174460] env[62383]: value = "vm-496442" [ 714.174460] env[62383]: _type = "VirtualMachine" [ 714.174460] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 714.174823] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bac7ac7f-994a-405f-8319-46e453277564 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.184050] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3a7282-df84-4577-94c6-13a51c74dd74 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.188986] env[62383]: DEBUG oslo_vmware.rw_handles [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lease: (returnval){ [ 714.188986] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524b5ced-0bfa-7d0b-9206-25099297d6b3" [ 714.188986] env[62383]: _type = "HttpNfcLease" [ 714.188986] env[62383]: } obtained for exporting VM: (result){ [ 714.188986] env[62383]: value = "vm-496442" [ 714.188986] env[62383]: _type = "VirtualMachine" [ 714.188986] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 714.189309] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the lease: (returnval){ [ 714.189309] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524b5ced-0bfa-7d0b-9206-25099297d6b3" [ 714.189309] env[62383]: _type = "HttpNfcLease" [ 714.189309] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 714.196685] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e0a285-1bef-495d-bde1-f54f3231b643 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.201491] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 714.201491] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524b5ced-0bfa-7d0b-9206-25099297d6b3" [ 714.201491] env[62383]: _type = "HttpNfcLease" [ 714.201491] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 714.202904] env[62383]: DEBUG nova.network.neutron [req-481f3557-b112-43ec-9f9e-035e5d577999 req-45f8664c-aa1f-4293-8c19-b2f16d623398 service nova] [instance: 583138d1-f928-4e33-a443-11c627203c44] Updated VIF entry in instance network info cache for port 73ec5d4d-c675-4804-a31f-e92bdc8286fd. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 714.203252] env[62383]: DEBUG nova.network.neutron [req-481f3557-b112-43ec-9f9e-035e5d577999 req-45f8664c-aa1f-4293-8c19-b2f16d623398 service nova] [instance: 583138d1-f928-4e33-a443-11c627203c44] Updating instance_info_cache with network_info: [{"id": "73ec5d4d-c675-4804-a31f-e92bdc8286fd", "address": "fa:16:3e:69:c0:d4", "network": {"id": "a6cf92ab-f9be-4021-bd8d-9e4b246d8dc1", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1324527743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "393ba5b56857422eaee92696b56dc23d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73ec5d4d-c6", "ovs_interfaceid": "73ec5d4d-c675-4804-a31f-e92bdc8286fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.238242] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8423bc-2374-4328-ace1-3ed0f0d9f4a0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.251593] env[62383]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df09a21d-149f-4fbf-aa9b-592743ec4497 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.256959] env[62383]: DEBUG oslo_vmware.api [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192775} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.260618] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.260921] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 714.261160] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.261382] env[62383]: INFO nova.compute.manager [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Took 0.67 seconds to destroy the instance on the hypervisor. [ 714.261650] env[62383]: DEBUG oslo.service.loopingcall [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.262354] env[62383]: DEBUG nova.compute.manager [-] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 714.262468] env[62383]: DEBUG nova.network.neutron [-] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.276029] env[62383]: DEBUG nova.compute.provider_tree [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.280804] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451439, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.391568] env[62383]: DEBUG oslo_vmware.api [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451448, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160779} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.391822] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.392011] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 714.392198] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.392373] env[62383]: INFO nova.compute.manager [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Took 1.15 seconds to destroy the instance on the hypervisor. [ 714.392606] env[62383]: DEBUG oslo.service.loopingcall [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.392793] env[62383]: DEBUG nova.compute.manager [-] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 714.392887] env[62383]: DEBUG nova.network.neutron [-] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.508857] env[62383]: DEBUG oslo_vmware.api [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Task: {'id': task-2451450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162951} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.509189] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.509469] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 714.509702] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.509889] env[62383]: INFO nova.compute.manager [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Took 1.66 seconds to destroy the instance on the hypervisor. [ 714.510155] env[62383]: DEBUG oslo.service.loopingcall [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.510349] env[62383]: DEBUG nova.compute.manager [-] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 714.510448] env[62383]: DEBUG nova.network.neutron [-] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.529273] env[62383]: DEBUG oslo_vmware.api [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451443, 'name': PowerOnVM_Task, 'duration_secs': 0.702836} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.529541] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 714.530447] env[62383]: INFO nova.compute.manager [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Took 9.97 seconds to spawn the instance on the hypervisor. 
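[editor's note] The "Updating instance_info_cache with network_info: [...]" entries above (e.g. for ports daaf9854-b852-... and 73ec5d4d-c675-...) carry each VIF as a JSON-like dict: port id, MAC, network, subnets, fixed IPs and any attached floating IPs. As a minimal, self-contained sketch (not Nova code; the helper name is made up, and the sample below is trimmed to the fields it reads, with values taken from the logged entry for port daaf9854-b852-...), this is how such a structure maps to per-port addresses:

    # Sketch only: walk a network_info-shaped list of VIF dicts and collect
    # the fixed and floating IPv4 addresses recorded for each port.
    from typing import Dict, List

    def collect_addresses(network_info: List[dict]) -> Dict[str, dict]:
        """Return {port_id: {"fixed": [...], "floating": [...]}} per VIF."""
        result = {}
        for vif in network_info:
            fixed, floating = [], []
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    fixed.append(ip["address"])
                    floating.extend(f["address"] for f in ip.get("floating_ips", []))
            result[vif["id"]] = {"fixed": fixed, "floating": floating}
        return result

    if __name__ == "__main__":
        sample = [{
            "id": "daaf9854-b852-4045-8380-ae136341958b",
            "network": {"subnets": [{
                "ips": [{"address": "192.168.128.13",
                         "floating_ips": [{"address": "10.180.180.141"}]}],
            }]},
        }]
        print(collect_addresses(sample))
        # -> {'daaf9854-...': {'fixed': ['192.168.128.13'],
        #                      'floating': ['10.180.180.141']}}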
[ 714.530447] env[62383]: DEBUG nova.compute.manager [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 714.530710] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61c0d21-a93b-47d6-9dae-cc0680256c77 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.594633] env[62383]: INFO nova.compute.manager [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Detaching volume 789417f0-0a0f-41c6-9067-c1e9bc8fd22e [ 714.615567] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451444, 'name': ReconfigVM_Task, 'duration_secs': 0.524186} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.615652] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 8bd05dac-7aa2-44c5-8752-6045c01d213d/8bd05dac-7aa2-44c5-8752-6045c01d213d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 714.616300] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8590697-6c1b-4622-8c58-56a662d3bc7a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.624342] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for the task: (returnval){ [ 714.624342] env[62383]: value = "task-2451452" [ 714.624342] env[62383]: _type = "Task" [ 714.624342] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.642036] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451452, 'name': Rename_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.646379] env[62383]: INFO nova.virt.block_device [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Attempting to driver detach volume 789417f0-0a0f-41c6-9067-c1e9bc8fd22e from mountpoint /dev/sdb [ 714.646510] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 714.646933] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496433', 'volume_id': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'name': 'volume-789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '80821717-f961-49c7-8b79-c152edfdfb94', 'attached_at': '', 'detached_at': '', 'volume_id': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'serial': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 714.647543] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a366acc-1f4b-496d-b35b-a9bfdeee244c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.395387] env[62383]: DEBUG oslo_concurrency.lockutils [req-481f3557-b112-43ec-9f9e-035e5d577999 req-45f8664c-aa1f-4293-8c19-b2f16d623398 service nova] Releasing lock "refresh_cache-583138d1-f928-4e33-a443-11c627203c44" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.396447] env[62383]: DEBUG nova.scheduler.client.report [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 715.399800] env[62383]: DEBUG nova.network.neutron [-] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.405843] env[62383]: DEBUG nova.network.neutron [-] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 715.418310] env[62383]: DEBUG nova.compute.manager [req-b0ac8e9c-72d0-48df-bf11-2820c50f137e req-96dbdabb-1366-4ee1-8e38-dbbdcf31b012 service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Received event network-vif-deleted-4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 715.418310] env[62383]: INFO nova.compute.manager [req-b0ac8e9c-72d0-48df-bf11-2820c50f137e req-96dbdabb-1366-4ee1-8e38-dbbdcf31b012 service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Neutron deleted interface 4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8; detaching it from the instance and deleting it from the info cache [ 715.418310] env[62383]: DEBUG nova.network.neutron [req-b0ac8e9c-72d0-48df-bf11-2820c50f137e req-96dbdabb-1366-4ee1-8e38-dbbdcf31b012 service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.422548] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95ee875-f623-4827-a4c7-8f161831992d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.430292] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451436, 'name': CloneVM_Task, 'duration_secs': 1.802594} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.432293] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Created linked-clone VM from snapshot [ 715.434058] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f73f96f-5788-4f4e-b905-3580b7ed9bac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.446346] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451439, 'name': CreateVM_Task, 'duration_secs': 2.346664} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.446600] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 715.446600] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524b5ced-0bfa-7d0b-9206-25099297d6b3" [ 715.446600] env[62383]: _type = "HttpNfcLease" [ 715.446600] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 715.446989] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451452, 'name': Rename_Task, 'duration_secs': 0.217303} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.447694] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 583138d1-f928-4e33-a443-11c627203c44] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 715.448383] env[62383]: DEBUG oslo_vmware.rw_handles [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 715.448383] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524b5ced-0bfa-7d0b-9206-25099297d6b3" [ 715.448383] env[62383]: _type = "HttpNfcLease" [ 715.448383] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 715.448542] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 715.449225] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a4c393-6a9a-4cb6-adce-ae14d911697f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.455226] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.455381] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.455700] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 715.456763] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d6029b-6dd7-4d99-9691-3f5a35b78b26 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.459708] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-007fb457-95f4-4821-a391-f60d29020554 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.461395] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] 
Uploading image ddf9a983-f0d0-4e2c-8ad6-601afa9df880 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 715.464902] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a888fab-c903-42e4-bfee-4e9cbc049313 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.492342] env[62383]: DEBUG oslo_vmware.rw_handles [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5208c18a-1f4c-c9d6-6ca6-40cd240c613d/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 715.492549] env[62383]: DEBUG oslo_vmware.rw_handles [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5208c18a-1f4c-c9d6-6ca6-40cd240c613d/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 715.497146] env[62383]: INFO nova.compute.manager [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Took 54.50 seconds to build instance. [ 715.499696] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17be8900-d655-4ea7-bff0-04489da160d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.503163] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for the task: (returnval){ [ 715.503163] env[62383]: value = "task-2451453" [ 715.503163] env[62383]: _type = "Task" [ 715.503163] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.503508] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 715.503508] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5205135e-b051-a811-21b2-56470e672be9" [ 715.503508] env[62383]: _type = "Task" [ 715.503508] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.580637] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 715.580637] env[62383]: value = "vm-496443" [ 715.580637] env[62383]: _type = "VirtualMachine" [ 715.580637] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 715.581844] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] The volume has not been displaced from its original location: [datastore2] volume-789417f0-0a0f-41c6-9067-c1e9bc8fd22e/volume-789417f0-0a0f-41c6-9067-c1e9bc8fd22e.vmdk. No consolidation needed. {{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 715.586981] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Reconfiguring VM instance instance-00000011 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 715.587454] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2829de43-5bc0-4e98-9d44-e1c525b45478 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.591327] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f45d44c-b6da-419d-a7b0-225e3d94b3dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.613985] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451453, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.613985] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5205135e-b051-a811-21b2-56470e672be9, 'name': SearchDatastore_Task, 'duration_secs': 0.013335} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.615290] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.615290] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.615290] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.615618] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.615618] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 715.615898] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79ac05f2-424c-409e-bf21-60121b07a9a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.622166] env[62383]: DEBUG oslo_vmware.api [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 715.622166] env[62383]: value = "task-2451455" [ 715.622166] env[62383]: _type = "Task" [ 715.622166] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.622166] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lease: (returnval){ [ 715.622166] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b55f38-5f22-5f7f-5845-74f9785321b0" [ 715.622166] env[62383]: _type = "HttpNfcLease" [ 715.622166] env[62383]: } obtained for exporting VM: (result){ [ 715.622166] env[62383]: value = "vm-496443" [ 715.622166] env[62383]: _type = "VirtualMachine" [ 715.622166] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 715.622166] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the lease: (returnval){ [ 715.622166] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b55f38-5f22-5f7f-5845-74f9785321b0" [ 715.622166] env[62383]: _type = "HttpNfcLease" [ 715.622166] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 715.631068] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 715.632580] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 715.632943] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-413c49f7-430e-4ff5-bcb0-22b5ec6f1e9d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.645316] env[62383]: DEBUG oslo_vmware.api [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451455, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.645653] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 715.645653] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b55f38-5f22-5f7f-5845-74f9785321b0" [ 715.645653] env[62383]: _type = "HttpNfcLease" [ 715.645653] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 715.651727] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 715.651727] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52bd22d6-a7e5-e384-e49a-95170ae5224d" [ 715.651727] env[62383]: _type = "Task" [ 715.651727] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.661300] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bd22d6-a7e5-e384-e49a-95170ae5224d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.665932] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-00084b05-8d41-4b37-b343-63cd02acd1f2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.696408] env[62383]: DEBUG nova.compute.manager [req-2a1bb3e3-8816-4596-abb4-ec7e64ff9c12 req-0df76a53-48e6-4b6c-8617-8d8850e106aa service nova] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Received event network-vif-deleted-ea3f5faa-5f1f-4491-b1f7-c51e4ae2fdc7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 715.696408] env[62383]: DEBUG nova.compute.manager [req-2a1bb3e3-8816-4596-abb4-ec7e64ff9c12 req-0df76a53-48e6-4b6c-8617-8d8850e106aa service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Received event network-vif-deleted-d6741482-4f9c-47b3-83b3-2c1ed7a7fbad {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 715.696551] env[62383]: INFO nova.compute.manager [req-2a1bb3e3-8816-4596-abb4-ec7e64ff9c12 req-0df76a53-48e6-4b6c-8617-8d8850e106aa service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Neutron deleted interface d6741482-4f9c-47b3-83b3-2c1ed7a7fbad; detaching it from the instance and deleting it from the info cache [ 715.697192] env[62383]: DEBUG nova.network.neutron [req-2a1bb3e3-8816-4596-abb4-ec7e64ff9c12 req-0df76a53-48e6-4b6c-8617-8d8850e106aa service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.911023] env[62383]: INFO nova.compute.manager [-] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Took 1.65 seconds to deallocate network for instance. [ 715.911023] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 7.720s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.911023] env[62383]: DEBUG nova.compute.manager [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 715.914764] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.233s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.917171] env[62383]: INFO nova.compute.claims [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 715.932227] env[62383]: DEBUG nova.network.neutron [-] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.932227] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e4570e9-11ef-469f-8c9c-1b6879281d80 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.944323] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b3de9c-7280-4591-bd2a-fd8ef05bc17e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.988219] env[62383]: INFO nova.compute.manager [-] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Took 1.60 seconds to deallocate network for instance. [ 715.988858] env[62383]: DEBUG nova.compute.manager [req-b0ac8e9c-72d0-48df-bf11-2820c50f137e req-96dbdabb-1366-4ee1-8e38-dbbdcf31b012 service nova] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Detach interface failed, port_id=4e965d9c-3c3b-4e1c-9e95-5ab5d671a4e8, reason: Instance 6b5daa17-ad4a-4b30-a1fe-083a1a238667 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 716.005334] env[62383]: DEBUG oslo_concurrency.lockutils [None req-097fb3e7-ee7c-45d7-8fb7-7750aa987582 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.966s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.016754] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451453, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.154025] env[62383]: DEBUG oslo_vmware.api [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451455, 'name': ReconfigVM_Task, 'duration_secs': 0.285723} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.154338] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 716.154338] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b55f38-5f22-5f7f-5845-74f9785321b0" [ 716.154338] env[62383]: _type = "HttpNfcLease" [ 716.154338] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 716.162098] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Reconfigured VM instance instance-00000011 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 716.168567] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 716.168567] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b55f38-5f22-5f7f-5845-74f9785321b0" [ 716.168567] env[62383]: _type = "HttpNfcLease" [ 716.168567] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 716.169295] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d94575f1-1110-4abe-8d17-67ab543f72f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.180637] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c429b53e-acfe-49f7-a852-4078fb295176 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.192732] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bd22d6-a7e5-e384-e49a-95170ae5224d, 'name': SearchDatastore_Task, 'duration_secs': 0.021384} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.197819] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd6d29-bedb-4e43-1228-ebeee8c2551c/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 716.199044] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd6d29-bedb-4e43-1228-ebeee8c2551c/disk-0.vmdk for reading. 
{{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 716.199509] env[62383]: DEBUG oslo_vmware.api [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 716.199509] env[62383]: value = "task-2451456" [ 716.199509] env[62383]: _type = "Task" [ 716.199509] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.199858] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa4f2cca-2655-4a31-b524-c691cb854af7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.268369] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39290955-97ba-4466-9140-37adedb8c4b0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.276494] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 716.276494] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522628d2-1351-3a22-9e77-ff0dc9b1a1eb" [ 716.276494] env[62383]: _type = "Task" [ 716.276494] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.280792] env[62383]: DEBUG oslo_vmware.api [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451456, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.290029] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5d8c47-d94f-4d34-b5e7-1738d99be9c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.308258] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522628d2-1351-3a22-9e77-ff0dc9b1a1eb, 'name': SearchDatastore_Task, 'duration_secs': 0.01387} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.311441] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.312519] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 583138d1-f928-4e33-a443-11c627203c44/583138d1-f928-4e33-a443-11c627203c44.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 716.312667] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-973bf206-636a-4c08-92b6-001fde282197 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.315461] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2c417a95-dc09-4704-8925-194872255f04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.337700] env[62383]: DEBUG nova.compute.manager [req-2a1bb3e3-8816-4596-abb4-ec7e64ff9c12 req-0df76a53-48e6-4b6c-8617-8d8850e106aa service nova] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Detach interface failed, port_id=d6741482-4f9c-47b3-83b3-2c1ed7a7fbad, reason: Instance 872ac212-9f29-426d-94c7-e1bf73aebd94 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 716.340587] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 716.340587] env[62383]: value = "task-2451457" [ 716.340587] env[62383]: _type = "Task" [ 716.340587] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.352964] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451457, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.424816] env[62383]: DEBUG nova.compute.utils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 716.425689] env[62383]: DEBUG nova.compute.manager [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 716.425916] env[62383]: DEBUG nova.network.neutron [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 716.432419] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.433911] env[62383]: INFO nova.compute.manager [-] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Took 1.92 seconds to deallocate network for instance. [ 716.491854] env[62383]: DEBUG nova.policy [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8500ba91ab5445d82406ff31a9ea721', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e20c8351a13a427db4fccbac7108c205', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 716.495192] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.511516] env[62383]: DEBUG nova.compute.manager [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 716.525306] env[62383]: DEBUG oslo_vmware.api [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2451453, 'name': PowerOnVM_Task, 'duration_secs': 0.702235} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.525899] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 716.525899] env[62383]: INFO nova.compute.manager [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Took 9.07 seconds to spawn the instance on the hypervisor. [ 716.526319] env[62383]: DEBUG nova.compute.manager [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 716.527166] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ede17e1-8fd1-4a69-a3a4-ce7dce3d1b58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.716345] env[62383]: DEBUG oslo_vmware.api [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451456, 'name': ReconfigVM_Task, 'duration_secs': 0.191489} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.717291] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496433', 'volume_id': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'name': 'volume-789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '80821717-f961-49c7-8b79-c152edfdfb94', 'attached_at': '', 'detached_at': '', 'volume_id': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e', 'serial': '789417f0-0a0f-41c6-9067-c1e9bc8fd22e'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 716.857103] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451457, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.877237] env[62383]: DEBUG nova.network.neutron [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Successfully created port: 45641616-950c-40e9-8a0f-76d8fd08bc82 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 716.903761] env[62383]: DEBUG nova.compute.manager [req-50d6501c-a7bb-42fb-8853-45be32be2952 req-7d1795ec-51ba-47ea-bf49-12a3ef16ff3e service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Received event network-changed-d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 716.905354] env[62383]: DEBUG nova.compute.manager [req-50d6501c-a7bb-42fb-8853-45be32be2952 req-7d1795ec-51ba-47ea-bf49-12a3ef16ff3e service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Refreshing instance network info cache due to event network-changed-d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 716.905798] env[62383]: DEBUG oslo_concurrency.lockutils [req-50d6501c-a7bb-42fb-8853-45be32be2952 req-7d1795ec-51ba-47ea-bf49-12a3ef16ff3e service nova] Acquiring lock "refresh_cache-67f05a2b-f323-4e4a-ac13-7f4745593be0" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.906021] env[62383]: DEBUG oslo_concurrency.lockutils [req-50d6501c-a7bb-42fb-8853-45be32be2952 req-7d1795ec-51ba-47ea-bf49-12a3ef16ff3e service nova] Acquired lock "refresh_cache-67f05a2b-f323-4e4a-ac13-7f4745593be0" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.906214] env[62383]: DEBUG nova.network.neutron [req-50d6501c-a7bb-42fb-8853-45be32be2952 req-7d1795ec-51ba-47ea-bf49-12a3ef16ff3e service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Refreshing network info cache for port d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.933194] env[62383]: DEBUG nova.compute.manager [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 716.944300] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.039907] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.061486] env[62383]: INFO nova.compute.manager [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Took 52.86 seconds to build instance. [ 717.302673] env[62383]: DEBUG nova.objects.instance [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lazy-loading 'flavor' on Instance uuid 80821717-f961-49c7-8b79-c152edfdfb94 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 717.360818] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451457, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550298} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.361520] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 583138d1-f928-4e33-a443-11c627203c44/583138d1-f928-4e33-a443-11c627203c44.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 717.361520] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 717.361993] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-681d6fb7-ccac-4866-8f91-0add8919a17d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.376302] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 717.376302] env[62383]: value = "task-2451458" [ 717.376302] env[62383]: _type = "Task" [ 717.376302] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.389970] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451458, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.567717] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f45d6647-649d-404c-9849-a74a5cce8bda tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lock "8bd05dac-7aa2-44c5-8752-6045c01d213d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.545s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.663991] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a34bed1-823f-4943-90d6-0bd8f7e81c2a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.678712] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0796fa5-baa6-4fcd-ae52-797a7253db78 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.719120] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce34bd3-b9a0-467c-9611-80008e30a2b9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.728754] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93631a8-70ca-4308-8021-32385e86d335 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.745666] env[62383]: DEBUG nova.compute.provider_tree [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 717.748709] env[62383]: DEBUG nova.network.neutron [req-50d6501c-a7bb-42fb-8853-45be32be2952 req-7d1795ec-51ba-47ea-bf49-12a3ef16ff3e service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Updated VIF entry in instance network info cache for port d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 717.748709] env[62383]: DEBUG nova.network.neutron [req-50d6501c-a7bb-42fb-8853-45be32be2952 req-7d1795ec-51ba-47ea-bf49-12a3ef16ff3e service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Updating instance_info_cache with network_info: [{"id": "d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd", "address": "fa:16:3e:ed:c5:82", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2ef4e8d-94", "ovs_interfaceid": "d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.891421] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451458, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083766} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.891914] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 717.892800] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ee9fb5-e0bf-4f4e-9555-175c678a2f4c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.918275] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 583138d1-f928-4e33-a443-11c627203c44/583138d1-f928-4e33-a443-11c627203c44.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 717.918669] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b31bd4a-53ed-4ed8-a0b3-057aafbca183 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.942281] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 717.942281] env[62383]: value = "task-2451459" [ 717.942281] env[62383]: _type = "Task" [ 717.942281] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.947108] env[62383]: DEBUG nova.compute.manager [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 717.956529] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451459, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.070383] env[62383]: DEBUG nova.compute.manager [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 718.253136] env[62383]: DEBUG nova.scheduler.client.report [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 718.261119] env[62383]: DEBUG oslo_concurrency.lockutils [req-50d6501c-a7bb-42fb-8853-45be32be2952 req-7d1795ec-51ba-47ea-bf49-12a3ef16ff3e service nova] Releasing lock "refresh_cache-67f05a2b-f323-4e4a-ac13-7f4745593be0" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.312021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-00d0b310-b6c8-4f88-8006-6ed203410f29 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.221s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.453803] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451459, 'name': ReconfigVM_Task, 'duration_secs': 0.489606} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.454171] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 583138d1-f928-4e33-a443-11c627203c44/583138d1-f928-4e33-a443-11c627203c44.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 718.454858] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93f00d92-6e0b-4069-bf59-b30118afb5bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.464395] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 718.464395] env[62383]: value = "task-2451460" [ 718.464395] env[62383]: _type = "Task" [ 718.464395] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.475715] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451460, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.584654] env[62383]: DEBUG nova.network.neutron [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Successfully updated port: 45641616-950c-40e9-8a0f-76d8fd08bc82 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 718.601619] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.766444] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.848s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.766444] env[62383]: DEBUG nova.compute.manager [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 718.768986] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.834s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.777702] env[62383]: INFO nova.compute.claims [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.798227] env[62383]: DEBUG nova.compute.manager [req-58aba578-c2a2-405d-a863-a5b0a6d547b6 req-288ddc85-93f7-4b42-94b1-760d8077f438 service nova] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Received event network-vif-plugged-45641616-950c-40e9-8a0f-76d8fd08bc82 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 718.798227] env[62383]: DEBUG oslo_concurrency.lockutils [req-58aba578-c2a2-405d-a863-a5b0a6d547b6 req-288ddc85-93f7-4b42-94b1-760d8077f438 service nova] Acquiring lock "a10f5b03-c45b-4cc2-923f-3227665d236c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.798227] env[62383]: DEBUG oslo_concurrency.lockutils [req-58aba578-c2a2-405d-a863-a5b0a6d547b6 req-288ddc85-93f7-4b42-94b1-760d8077f438 service nova] Lock "a10f5b03-c45b-4cc2-923f-3227665d236c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.798903] env[62383]: DEBUG oslo_concurrency.lockutils [req-58aba578-c2a2-405d-a863-a5b0a6d547b6 req-288ddc85-93f7-4b42-94b1-760d8077f438 service nova] Lock "a10f5b03-c45b-4cc2-923f-3227665d236c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.799303] env[62383]: DEBUG nova.compute.manager [req-58aba578-c2a2-405d-a863-a5b0a6d547b6 req-288ddc85-93f7-4b42-94b1-760d8077f438 service nova] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] No waiting events found dispatching network-vif-plugged-45641616-950c-40e9-8a0f-76d8fd08bc82 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 718.800336] env[62383]: WARNING nova.compute.manager [req-58aba578-c2a2-405d-a863-a5b0a6d547b6 req-288ddc85-93f7-4b42-94b1-760d8077f438 service nova] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Received unexpected event network-vif-plugged-45641616-950c-40e9-8a0f-76d8fd08bc82 for instance with vm_state building and task_state spawning. [ 718.978862] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451460, 'name': Rename_Task, 'duration_secs': 0.21018} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.978862] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 718.978862] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4b23da8-1b17-4613-b4e2-f95d23f88c72 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.985516] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 718.985516] env[62383]: value = "task-2451461" [ 718.985516] env[62383]: _type = "Task" [ 718.985516] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.994315] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451461, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.087475] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "refresh_cache-a10f5b03-c45b-4cc2-923f-3227665d236c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.087475] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "refresh_cache-a10f5b03-c45b-4cc2-923f-3227665d236c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.087475] env[62383]: DEBUG nova.network.neutron [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 719.280263] env[62383]: DEBUG nova.compute.utils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 719.290024] env[62383]: DEBUG nova.compute.manager [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 719.290024] env[62383]: DEBUG nova.network.neutron [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 719.353553] env[62383]: DEBUG nova.policy [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d374b5a04f94016b0f5aa198b02b40b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8d1b45dd8d74bf9a01173d57990d06b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 719.496836] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451461, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.624791] env[62383]: DEBUG nova.network.neutron [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 719.646348] env[62383]: DEBUG nova.network.neutron [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Successfully created port: 796c3e3e-48f2-4d7f-8f7d-974f792c4426 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.765254] env[62383]: DEBUG nova.network.neutron [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Updating instance_info_cache with network_info: [{"id": "45641616-950c-40e9-8a0f-76d8fd08bc82", "address": "fa:16:3e:0c:24:4a", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45641616-95", "ovs_interfaceid": "45641616-950c-40e9-8a0f-76d8fd08bc82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.788644] env[62383]: DEBUG nova.compute.manager [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 719.999298] env[62383]: DEBUG oslo_vmware.api [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451461, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.268945] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "refresh_cache-a10f5b03-c45b-4cc2-923f-3227665d236c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.269266] env[62383]: DEBUG nova.compute.manager [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Instance network_info: |[{"id": "45641616-950c-40e9-8a0f-76d8fd08bc82", "address": "fa:16:3e:0c:24:4a", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45641616-95", "ovs_interfaceid": "45641616-950c-40e9-8a0f-76d8fd08bc82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 720.353109] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f1dabc5-cbdd-4871-8879-914425430f6a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.363335] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834f28b6-adbb-4330-9d29-407f92c028c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.398845] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76c3d13-63fd-46da-b8cf-ca98acf25411 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.408822] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3777a89e-63f5-4957-aaae-ab20f00ac85b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.425623] env[62383]: DEBUG nova.compute.provider_tree [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.498468] env[62383]: DEBUG oslo_vmware.api [None 
req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451461, 'name': PowerOnVM_Task, 'duration_secs': 1.054923} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.498722] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 720.498916] env[62383]: INFO nova.compute.manager [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Took 10.28 seconds to spawn the instance on the hypervisor. [ 720.499109] env[62383]: DEBUG nova.compute.manager [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.499921] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e18bf12-524b-4d9e-8f9b-11f9410dd9d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.800477] env[62383]: DEBUG nova.compute.manager [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 720.929100] env[62383]: DEBUG nova.scheduler.client.report [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 721.019961] env[62383]: INFO nova.compute.manager [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Took 49.93 seconds to build instance. 
[ 721.137227] env[62383]: DEBUG nova.network.neutron [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Successfully updated port: 796c3e3e-48f2-4d7f-8f7d-974f792c4426 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.434738] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.435392] env[62383]: DEBUG nova.compute.manager [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 721.438080] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.267s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.438306] env[62383]: DEBUG nova.objects.instance [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lazy-loading 'resources' on Instance uuid 17498cb6-8b16-4a2e-96ae-c594966cee77 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 721.522183] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fe6793b0-0d8d-4635-ae7f-a6277896f58c tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "583138d1-f928-4e33-a443-11c627203c44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.195s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.639397] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.639657] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.639895] env[62383]: DEBUG nova.network.neutron [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Building network info cache 
for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.946644] env[62383]: DEBUG nova.compute.utils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 721.949534] env[62383]: DEBUG nova.compute.manager [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 721.949534] env[62383]: DEBUG nova.network.neutron [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 722.000610] env[62383]: DEBUG nova.policy [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52222401600845bcb88d02f000771658', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2b77864c75943b4a625276225c3aac9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 722.024902] env[62383]: DEBUG nova.compute.manager [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 722.186466] env[62383]: DEBUG nova.network.neutron [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.329272] env[62383]: DEBUG nova.network.neutron [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Successfully created port: 1ee3599c-2ed4-4e36-9e36-3446e6178380 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.357960] env[62383]: DEBUG nova.network.neutron [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance_info_cache with network_info: [{"id": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "address": "fa:16:3e:9b:eb:a7", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap796c3e3e-48", "ovs_interfaceid": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.455219] env[62383]: DEBUG nova.compute.manager [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 722.549159] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9393d2-3e71-414e-a4c7-75d95f968b84 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.553324] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.559554] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29795375-1140-4c85-bd4a-f8f4c412346d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.592630] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c09cd3-655b-453f-8ed9-ecd80fac052c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.602064] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c936fa34-9bb2-4073-bbce-9d2d50b526db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.617915] env[62383]: DEBUG nova.compute.provider_tree [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.861519] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.861888] env[62383]: DEBUG nova.compute.manager [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Instance network_info: |[{"id": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "address": "fa:16:3e:9b:eb:a7", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap796c3e3e-48", 
"ovs_interfaceid": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 723.121533] env[62383]: DEBUG nova.scheduler.client.report [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 723.468492] env[62383]: DEBUG nova.compute.manager [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 723.627027] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.189s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.629545] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.949s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.631427] env[62383]: INFO nova.compute.claims [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.654361] env[62383]: INFO nova.scheduler.client.report [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Deleted allocations for instance 17498cb6-8b16-4a2e-96ae-c594966cee77 [ 723.823431] env[62383]: DEBUG nova.network.neutron [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Successfully updated port: 1ee3599c-2ed4-4e36-9e36-3446e6178380 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 724.162432] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb073f38-c0c8-45ea-b143-b4515c8c27cb tempest-FloatingIPsAssociationNegativeTestJSON-1813744616 
tempest-FloatingIPsAssociationNegativeTestJSON-1813744616-project-member] Lock "17498cb6-8b16-4a2e-96ae-c594966cee77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.657s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.326442] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "refresh_cache-563840a8-8fa7-4bfa-9912-933c14e7076a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.326651] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired lock "refresh_cache-563840a8-8fa7-4bfa-9912-933c14e7076a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.326693] env[62383]: DEBUG nova.network.neutron [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 724.866842] env[62383]: DEBUG nova.network.neutron [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.016328] env[62383]: DEBUG nova.network.neutron [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Updating instance_info_cache with network_info: [{"id": "1ee3599c-2ed4-4e36-9e36-3446e6178380", "address": "fa:16:3e:3e:ad:97", "network": {"id": "3ae77c57-8c24-4aba-943f-e5e0c2471a86", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-404472524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b77864c75943b4a625276225c3aac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ee3599c-2e", "ovs_interfaceid": "1ee3599c-2ed4-4e36-9e36-3446e6178380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.140711] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb85265-f643-4179-985d-0b9e30e8f019 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.149010] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57afb0b3-828d-4846-aca8-586a8c6cf339 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.181591] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb6ac6a-e317-49e7-be10-ef78330899f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.189986] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21fb99e3-5746-455e-8c9c-5203df1416a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.204378] env[62383]: DEBUG nova.compute.provider_tree [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.293954] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 725.294168] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.294319] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 725.294509] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.294661] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 725.294821] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 725.295120] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 725.295226] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 725.295383] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 725.295548] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 
tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 725.295727] env[62383]: DEBUG nova.virt.hardware [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 725.297907] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cdb00d-19da-43ab-b389-10b0a4633aa2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.309581] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:26:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='4fbcd04d-cb08-4e45-b5c9-0176dc87583e',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1902383657',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 725.309809] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.309966] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 725.310171] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.310319] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 725.310466] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 725.310667] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 725.310830] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 725.310996] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 725.311184] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 725.311357] env[62383]: DEBUG nova.virt.hardware [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 725.312784] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6bba78-61da-48f5-bbad-406b12ae65e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.318292] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3270595-ec70-4ece-bf4b-4eaa9f47b040 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.331910] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:24:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45641616-950c-40e9-8a0f-76d8fd08bc82', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.340347] env[62383]: DEBUG oslo.service.loopingcall [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.345164] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 725.345495] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.345670] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 725.345898] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.346595] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 725.346961] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 725.348127] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 725.348127] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 725.348263] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 725.348690] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 725.349079] env[62383]: DEBUG nova.virt.hardware [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 725.349641] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 725.353302] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375efa5c-414d-4612-86f7-fc50acd19879 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.357465] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f90654bc-b7fc-4704-bc7e-a1a43d9abf6e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.382274] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d1ff01-1e70-47a7-9b5b-65702814ea0f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.389327] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd6d29-bedb-4e43-1228-ebeee8c2551c/disk-0.vmdk. 
{{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 725.390614] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3c4d06-c176-4a87-8994-15cac320e96f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.398706] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b81695-507a-4ad4-8419-a346feaa777c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.413101] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:eb:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '796c3e3e-48f2-4d7f-8f7d-974f792c4426', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.420662] env[62383]: DEBUG oslo.service.loopingcall [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.420921] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd6d29-bedb-4e43-1228-ebeee8c2551c/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 725.421085] env[62383]: ERROR oslo_vmware.rw_handles [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd6d29-bedb-4e43-1228-ebeee8c2551c/disk-0.vmdk due to incomplete transfer. [ 725.421298] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.421298] env[62383]: value = "task-2451462" [ 725.421298] env[62383]: _type = "Task" [ 725.421298] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.423629] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 725.423873] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d5fd3502-019f-4341-905f-98d313537a49 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.435162] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ed89ea1-f8b9-4e11-b20e-6a2745709180 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.452243] env[62383]: DEBUG oslo_vmware.rw_handles [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5208c18a-1f4c-c9d6-6ca6-40cd240c613d/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 725.453444] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb35fb0-8de5-4400-ab31-b083c723b29a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.460708] env[62383]: DEBUG oslo_vmware.rw_handles [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5208c18a-1f4c-c9d6-6ca6-40cd240c613d/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 725.460866] env[62383]: ERROR oslo_vmware.rw_handles [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5208c18a-1f4c-c9d6-6ca6-40cd240c613d/disk-0.vmdk due to incomplete transfer. [ 725.465415] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c2416b5d-1663-4860-b1f0-af5446ec8799 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.467145] env[62383]: DEBUG oslo_vmware.rw_handles [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52cd6d29-bedb-4e43-1228-ebeee8c2551c/disk-0.vmdk. 
{{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 725.467361] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Uploaded image ddf9a983-f0d0-4e2c-8ad6-601afa9df880 to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 725.470727] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 725.470727] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451462, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.470727] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.470727] env[62383]: value = "task-2451463" [ 725.470727] env[62383]: _type = "Task" [ 725.470727] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.470727] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-13b912e3-d061-4f3e-93bc-b9ded2207e39 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.480973] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451463, 'name': CreateVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.483014] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 725.483014] env[62383]: value = "task-2451464" [ 725.483014] env[62383]: _type = "Task" [ 725.483014] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.483330] env[62383]: DEBUG oslo_vmware.rw_handles [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5208c18a-1f4c-c9d6-6ca6-40cd240c613d/disk-0.vmdk. 
{{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 725.483564] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Uploaded image 2c35654f-cf3e-45f8-bb13-ed76aec806dc to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 725.485816] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 725.486052] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0dbc0aa6-4cc5-46ab-a0e7-721a5980e497 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.498752] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451464, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.500413] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 725.500413] env[62383]: value = "task-2451465" [ 725.500413] env[62383]: _type = "Task" [ 725.500413] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.509861] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451465, 'name': Destroy_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.518713] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Releasing lock "refresh_cache-563840a8-8fa7-4bfa-9912-933c14e7076a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.519182] env[62383]: DEBUG nova.compute.manager [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Instance network_info: |[{"id": "1ee3599c-2ed4-4e36-9e36-3446e6178380", "address": "fa:16:3e:3e:ad:97", "network": {"id": "3ae77c57-8c24-4aba-943f-e5e0c2471a86", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-404472524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b77864c75943b4a625276225c3aac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ee3599c-2e", "ovs_interfaceid": "1ee3599c-2ed4-4e36-9e36-3446e6178380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 725.519684] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:ad:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ee3599c-2ed4-4e36-9e36-3446e6178380', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.528156] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Creating folder: Project (f2b77864c75943b4a625276225c3aac9). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 725.528595] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c10ef71-ff55-45fa-b8cf-83bbdf63c06c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.542749] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Created folder: Project (f2b77864c75943b4a625276225c3aac9) in parent group-v496304. [ 725.543022] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Creating folder: Instances. Parent ref: group-v496449. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 725.543256] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd3ba088-5d3b-46ee-8901-dde4a6bb236d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.555168] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Created folder: Instances in parent group-v496449. [ 725.555475] env[62383]: DEBUG oslo.service.loopingcall [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 725.555615] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 725.555954] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9badb28a-b664-4a8f-8fc0-1dafc7d13efa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.578783] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.578783] env[62383]: value = "task-2451468" [ 725.578783] env[62383]: _type = "Task" [ 725.578783] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.589645] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451468, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.708452] env[62383]: DEBUG nova.scheduler.client.report [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 725.936338] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451462, 'name': CreateVM_Task, 'duration_secs': 0.428815} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.936537] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 725.937196] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.937410] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.937928] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 725.938235] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf8cdf78-9c91-4e02-8780-2559a5e86221 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.943798] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 725.943798] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52327a44-4bd0-efe7-d988-418014592d88" [ 725.943798] env[62383]: _type = "Task" [ 725.943798] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.953262] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52327a44-4bd0-efe7-d988-418014592d88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.983691] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451463, 'name': CreateVM_Task, 'duration_secs': 0.468883} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.983884] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 725.984588] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.997241] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451464, 'name': Destroy_Task, 'duration_secs': 0.355893} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.997525] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Destroyed the VM [ 725.997760] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 725.998027] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5d6f499d-38b4-431b-8544-de069f3f60d9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.005623] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 726.005623] env[62383]: value = "task-2451469" [ 726.005623] env[62383]: _type = "Task" [ 726.005623] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.012186] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451465, 'name': Destroy_Task} progress is 33%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.017585] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451469, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.090034] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451468, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.214940] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.216653] env[62383]: DEBUG nova.compute.manager [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 726.218676] env[62383]: DEBUG oslo_concurrency.lockutils [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.620s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.222084] env[62383]: DEBUG nova.objects.instance [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 726.282103] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.282425] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.344529] env[62383]: DEBUG nova.compute.manager [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Received event network-changed-45641616-950c-40e9-8a0f-76d8fd08bc82 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 726.344722] env[62383]: DEBUG nova.compute.manager [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Refreshing instance network info cache due to event network-changed-45641616-950c-40e9-8a0f-76d8fd08bc82. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 726.345073] env[62383]: DEBUG oslo_concurrency.lockutils [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] Acquiring lock "refresh_cache-a10f5b03-c45b-4cc2-923f-3227665d236c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.345144] env[62383]: DEBUG oslo_concurrency.lockutils [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] Acquired lock "refresh_cache-a10f5b03-c45b-4cc2-923f-3227665d236c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.345775] env[62383]: DEBUG nova.network.neutron [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Refreshing network info cache for port 45641616-950c-40e9-8a0f-76d8fd08bc82 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 726.456362] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52327a44-4bd0-efe7-d988-418014592d88, 'name': SearchDatastore_Task, 'duration_secs': 0.010907} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.456701] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.457019] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.457294] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.457460] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.457663] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.457980] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.458350] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 726.458622] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94148409-794a-446f-bebd-9f2d29982fd7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.460723] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf06b3ab-1a9a-46a3-8594-a2e4f39bf6c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.466502] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef 
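Annotation: these entries show the per-image locking around the devstack-image-cache_base vmdk: acquire the lock named after the cached image, run SearchDatastore_Task to see whether the disk is already cached, and only fetch it if missing. A simplified in-process stand-in for that check-then-populate pattern (the real code uses oslo.concurrency named locks plus an external semaphore; the cache dict below is purely illustrative):

import threading
from collections import defaultdict

# Simplified stand-in for named locks such as
# "[datastore2] devstack-image-cache_base/<image-id>".
_locks = defaultdict(threading.Lock)

def ensure_cached_image(image_id, cache):
    """Check, then populate, the image cache under a per-image lock."""
    with _locks[image_id]:                  # "Acquired lock ...<image_id>"
        if image_id in cache:               # SearchDatastore_Task found it
            return cache[image_id]
        cache[image_id] = "download of %s" % image_id   # fetch only once
        return cache[image_id]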
tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 726.466502] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d4f596-e89d-38d5-0e37-ad8ada8f97aa" [ 726.466502] env[62383]: _type = "Task" [ 726.466502] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.470932] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.471154] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 726.473077] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8594964c-abdc-4787-824e-32e13a080e8d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.476937] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d4f596-e89d-38d5-0e37-ad8ada8f97aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.480455] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 726.480455] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52fea90a-4208-8c9a-c04b-f5b45e6ff752" [ 726.480455] env[62383]: _type = "Task" [ 726.480455] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.491912] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fea90a-4208-8c9a-c04b-f5b45e6ff752, 'name': SearchDatastore_Task, 'duration_secs': 0.009056} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.492713] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ef430e4-da41-4b73-9906-2850f3efffa8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.499350] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 726.499350] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a24c48-38da-42fe-c585-bc4b3e5b68d3" [ 726.499350] env[62383]: _type = "Task" [ 726.499350] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.513850] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a24c48-38da-42fe-c585-bc4b3e5b68d3, 'name': SearchDatastore_Task, 'duration_secs': 0.011103} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.517362] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.517639] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a10f5b03-c45b-4cc2-923f-3227665d236c/a10f5b03-c45b-4cc2-923f-3227665d236c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 726.517916] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451465, 'name': Destroy_Task, 'duration_secs': 0.625194} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.518132] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef3bed9e-fa59-42e8-bfbd-449a047ccb08 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.520198] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Destroyed the VM [ 726.520393] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 726.523815] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-addadb6d-9d9c-47ef-bbb3-c5a973cf4ec6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.525240] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451469, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.531755] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 726.531755] env[62383]: value = "task-2451470" [ 726.531755] env[62383]: _type = "Task" [ 726.531755] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.533031] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 726.533031] env[62383]: value = "task-2451471" [ 726.533031] env[62383]: _type = "Task" [ 726.533031] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.544956] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451471, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.547958] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451470, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.589919] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451468, 'name': CreateVM_Task, 'duration_secs': 0.619699} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.590127] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 726.590923] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.724388] env[62383]: DEBUG nova.compute.utils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 726.731101] env[62383]: DEBUG nova.compute.manager [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 726.731101] env[62383]: DEBUG nova.network.neutron [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 726.781494] env[62383]: DEBUG nova.policy [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db7e9998210e485fa855f0375f63ad55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35016a724e7e4fa2b0fc19396d8e736b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 726.979113] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d4f596-e89d-38d5-0e37-ad8ada8f97aa, 'name': SearchDatastore_Task, 'duration_secs': 0.01093} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.979461] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.979706] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.984031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.984286] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.984489] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir 
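Annotation: the policy DEBUG above is expected for these non-admin tempest credentials: the request carries only the reader and member roles, so network:attach_external_network is refused and port allocation proceeds without an external network. A toy role-based check against the credentials shown, purely to illustrate the decision (this is not oslo.policy's API):

creds = {"roles": ["reader", "member"], "is_admin": False,
         "project_id": "35016a724e7e4fa2b0fc19396d8e736b"}

# Toy rule: attaching an external network requires the admin role.
RULES = {"network:attach_external_network": lambda c: "admin" in c["roles"]}

def check(rule, credentials):
    allowed = RULES[rule](credentials)
    if not allowed:
        print("Policy check for %s failed with credentials %s" % (rule, credentials))
    return allowed

check("network:attach_external_network", creds)   # -> False, as in the log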
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.984827] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.985157] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 726.985395] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26f6ad8b-43cb-447e-9a12-e02cf82b228e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.987360] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-110fe316-be6d-4072-a754-9d1cf4cf0a13 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.995390] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 726.995390] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a90661-ed1b-9fe7-ab07-c931ac6946e5" [ 726.995390] env[62383]: _type = "Task" [ 726.995390] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.000276] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 727.000449] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 727.001772] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1f34dd2-cb06-4e71-bea4-a4ece249cb7e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.008357] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a90661-ed1b-9fe7-ab07-c931ac6946e5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.017826] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 727.017826] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52580f51-530f-97e4-4ade-64471e05365b" [ 727.017826] env[62383]: _type = "Task" [ 727.017826] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.027690] env[62383]: DEBUG oslo_vmware.api [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451469, 'name': RemoveSnapshot_Task, 'duration_secs': 0.527975} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.028322] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 727.028584] env[62383]: INFO nova.compute.manager [None req-c951ff10-9215-447c-bbf7-8161c694c03b tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Took 16.55 seconds to snapshot the instance on the hypervisor. [ 727.034776] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52580f51-530f-97e4-4ade-64471e05365b, 'name': SearchDatastore_Task, 'duration_secs': 0.010662} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.040758] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e19168f1-a41d-4ee1-9c3e-6df970a0d778 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.061827] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451471, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.062901] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451470, 'name': CopyVirtualDisk_Task} progress is 89%. 
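Annotation: the RemoveSnapshot_Task completion above is the tail of an instance snapshot: the temporary VM is destroyed, the source VM's snapshot is removed, and the compute manager reports the total duration (16.55 seconds here). A loose sketch of that ordering and timing, assuming hypothetical step callables rather than the actual vmops methods:

import time

def snapshot_instance(vm, steps):
    """Illustrative ordering only: snapshot, copy, then always clean up."""
    start = time.monotonic()
    try:
        steps["create_snapshot"](vm)
        steps["copy_disk"](vm)
    finally:
        steps["destroy_clone"](vm)        # Destroy_Task in the log
        steps["remove_snapshot"](vm)      # RemoveSnapshot_Task in the log
    print("Took %.2f seconds to snapshot the instance on the hypervisor"
          % (time.monotonic() - start))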
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.062901] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 727.062901] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522db481-f2ab-662f-f3ac-785de56c28b8" [ 727.062901] env[62383]: _type = "Task" [ 727.062901] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.071804] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522db481-f2ab-662f-f3ac-785de56c28b8, 'name': SearchDatastore_Task, 'duration_secs': 0.012909} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.072691] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.072691] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9604eadf-a027-46dd-989b-0d4b752f883a/9604eadf-a027-46dd-989b-0d4b752f883a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 727.072691] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cbf95e4-b618-4fbd-b479-aab2f7b17602 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.082653] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 727.082653] env[62383]: value = "task-2451472" [ 727.082653] env[62383]: _type = "Task" [ 727.082653] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.091578] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451472, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.106206] env[62383]: DEBUG nova.network.neutron [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Successfully created port: 5773169e-f9fe-4180-8237-10b88641ce09 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.232463] env[62383]: DEBUG oslo_concurrency.lockutils [None req-893dc8b1-c0e9-4ea9-8c7c-06b1f2dd12e3 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.233774] env[62383]: DEBUG nova.compute.manager [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 727.236940] env[62383]: DEBUG oslo_concurrency.lockutils [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.508s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.237224] env[62383]: DEBUG nova.objects.instance [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lazy-loading 'resources' on Instance uuid 67d41910-54e1-48f1-b0d3-f34a62595ef2 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 727.389110] env[62383]: DEBUG oslo_concurrency.lockutils [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquiring lock "583138d1-f928-4e33-a443-11c627203c44" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.389247] env[62383]: DEBUG oslo_concurrency.lockutils [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "583138d1-f928-4e33-a443-11c627203c44" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.389898] env[62383]: INFO nova.compute.manager [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Rebooting instance [ 727.517221] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': 
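Annotation: the "waited 31.508s" / "held 1.014s" figures above come from timing around acquisition of the single compute_resources lock that serializes resource-tracker claims and usage updates. A simplified stand-in showing how such waited/held accounting can be produced with a context manager (not the real lockutils internals):

import contextlib
import threading
import time

_compute_resources = threading.Lock()

@contextlib.contextmanager
def timed_lock(name, lock=_compute_resources):
    # Simplified version of the "waited X / held Y" accounting in the log.
    t0 = time.monotonic()
    with lock:
        print('Lock "%s" acquired :: waited %.3fs' % (name, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            print('Lock "%s" "released" :: held %.3fs' % (name, time.monotonic() - t1))

with timed_lock("compute_resources"):
    pass  # instance_claim / update_usage would run here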
session[526c6062-9206-ac03-b2da-fd469a7c1551]52a90661-ed1b-9fe7-ab07-c931ac6946e5, 'name': SearchDatastore_Task, 'duration_secs': 0.020762} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.517680] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.517978] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 727.518254] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.518419] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.518686] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 727.518925] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f1665da-be64-4518-a22d-41e2d96b6b36 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.534365] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 727.534558] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 727.537792] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b16fb95-2f94-4813-b473-b5f7196a1ade {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.553314] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 727.553314] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525f9888-440b-e7f2-c44c-d7e2b7438141" [ 727.553314] env[62383]: _type = "Task" [ 727.553314] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.563021] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451470, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549269} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.563289] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451471, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.568949] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a10f5b03-c45b-4cc2-923f-3227665d236c/a10f5b03-c45b-4cc2-923f-3227665d236c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 727.569236] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 727.569833] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-abcc2b54-9d5d-47c7-af5d-81c89bf2da42 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.578743] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525f9888-440b-e7f2-c44c-d7e2b7438141, 'name': SearchDatastore_Task, 'duration_secs': 0.011109} completed successfully. 
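Annotation: the copy from the image cache is followed by "Extending root virtual disk to 1048576". That value matches a 1 GiB flavor root disk expressed in KiB, which is the unit the numbers suggest; the conversion, under that assumption:

# 1048576 = 1 * 1024 * 1024, i.e. a 1 GiB root disk in KiB (assumed unit).
KiB_PER_GiB = 1024 * 1024

def root_disk_size_kib(flavor_root_gb):
    return flavor_root_gb * KiB_PER_GiB

assert root_disk_size_kib(1) == 1048576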
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.580734] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 727.580734] env[62383]: value = "task-2451473" [ 727.580734] env[62383]: _type = "Task" [ 727.580734] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.580956] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2461a399-56d2-4e34-b744-effe77a3b103 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.604714] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 727.604714] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c8321e-5c09-57ad-d478-4c113fa58bb8" [ 727.604714] env[62383]: _type = "Task" [ 727.604714] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.608682] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451472, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467069} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.608947] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451473, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.612227] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9604eadf-a027-46dd-989b-0d4b752f883a/9604eadf-a027-46dd-989b-0d4b752f883a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 727.612440] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 727.612700] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25b6e910-e09a-4749-9417-995e06dafa01 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.621372] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c8321e-5c09-57ad-d478-4c113fa58bb8, 'name': SearchDatastore_Task, 'duration_secs': 0.010708} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.622904] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.623329] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 563840a8-8fa7-4bfa-9912-933c14e7076a/563840a8-8fa7-4bfa-9912-933c14e7076a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 727.623629] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 727.623629] env[62383]: value = "task-2451474" [ 727.623629] env[62383]: _type = "Task" [ 727.623629] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.623825] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0306794-6ada-4b47-9c3b-b739ba3e8473 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.636543] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451474, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.638112] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 727.638112] env[62383]: value = "task-2451475" [ 727.638112] env[62383]: _type = "Task" [ 727.638112] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.648981] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451475, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.664348] env[62383]: DEBUG nova.network.neutron [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Updated VIF entry in instance network info cache for port 45641616-950c-40e9-8a0f-76d8fd08bc82. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 727.664853] env[62383]: DEBUG nova.network.neutron [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Updating instance_info_cache with network_info: [{"id": "45641616-950c-40e9-8a0f-76d8fd08bc82", "address": "fa:16:3e:0c:24:4a", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45641616-95", "ovs_interfaceid": "45641616-950c-40e9-8a0f-76d8fd08bc82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.916325] env[62383]: DEBUG oslo_concurrency.lockutils [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquiring lock "refresh_cache-583138d1-f928-4e33-a443-11c627203c44" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.916550] env[62383]: DEBUG oslo_concurrency.lockutils [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquired lock "refresh_cache-583138d1-f928-4e33-a443-11c627203c44" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.916771] env[62383]: DEBUG nova.network.neutron [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.059785] env[62383]: DEBUG oslo_vmware.api [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451471, 'name': RemoveSnapshot_Task, 'duration_secs': 1.052248} completed successfully. 
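Annotation: the refreshed cache entry above spells out the network_info layout: a list of VIFs, each carrying a network with subnets and fixed IPs plus OVS binding details. A small walk over that structure, trimmed to the fields shown in the log (illustrative helper, not Nova's network model classes):

# network_info entry as cached for instance a10f5b03-..., trimmed to the
# fields used below (the full structure appears in the log line above).
network_info = [{
    "id": "45641616-950c-40e9-8a0f-76d8fd08bc82",
    "address": "fa:16:3e:0c:24:4a",
    "ovs_interfaceid": "45641616-950c-40e9-8a0f-76d8fd08bc82",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.13",
                                      "type": "fixed"}]}]},
}]

def fixed_ips(nw_info):
    """Walk vif -> network -> subnets -> ips, as the cache layout implies."""
    return [ip["address"]
            for vif in nw_info
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip.get("type") == "fixed"]

print(fixed_ips(network_info))   # ['192.168.128.13']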
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.060220] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 728.061025] env[62383]: INFO nova.compute.manager [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Took 18.12 seconds to snapshot the instance on the hypervisor. [ 728.096583] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451473, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100023} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.100758] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.102059] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b50bc0-41ba-4bf2-bd02-45d5e1f2a80e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.130474] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] a10f5b03-c45b-4cc2-923f-3227665d236c/a10f5b03-c45b-4cc2-923f-3227665d236c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.133405] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a08646ca-6f0a-4e0b-bb82-4cd2448bff12 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.162396] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451474, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079204} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.169770] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.170386] env[62383]: DEBUG oslo_concurrency.lockutils [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] Releasing lock "refresh_cache-a10f5b03-c45b-4cc2-923f-3227665d236c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.170655] env[62383]: DEBUG nova.compute.manager [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Received event network-changed-9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 728.170826] env[62383]: DEBUG nova.compute.manager [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Refreshing instance network info cache due to event network-changed-9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 728.171097] env[62383]: DEBUG oslo_concurrency.lockutils [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] Acquiring lock "refresh_cache-8bd05dac-7aa2-44c5-8752-6045c01d213d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.171319] env[62383]: DEBUG oslo_concurrency.lockutils [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] Acquired lock "refresh_cache-8bd05dac-7aa2-44c5-8752-6045c01d213d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.171597] env[62383]: DEBUG nova.network.neutron [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Refreshing network info cache for port 9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 728.172754] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451475, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451743} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.173074] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 728.173074] env[62383]: value = "task-2451476" [ 728.173074] env[62383]: _type = "Task" [ 728.173074] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.174095] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e3f406-5d93-4e32-ae5f-66e475de14b0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.176690] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 563840a8-8fa7-4bfa-9912-933c14e7076a/563840a8-8fa7-4bfa-9912-933c14e7076a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 728.176916] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 728.177455] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a614f2a6-c468-4582-b584-13318cde462d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.194523] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451476, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.222335] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 9604eadf-a027-46dd-989b-0d4b752f883a/9604eadf-a027-46dd-989b-0d4b752f883a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.222335] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 728.222335] env[62383]: value = "task-2451477" [ 728.222335] env[62383]: _type = "Task" [ 728.222335] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.225069] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ed2e267-60a2-4976-9276-0ac054287687 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.243273] env[62383]: DEBUG oslo_concurrency.lockutils [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquiring lock "f28beb17-8455-49d3-8be0-7636b9abe4e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.243595] env[62383]: DEBUG oslo_concurrency.lockutils [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lock "f28beb17-8455-49d3-8be0-7636b9abe4e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.246737] env[62383]: DEBUG oslo_concurrency.lockutils [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquiring lock "f28beb17-8455-49d3-8be0-7636b9abe4e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.246737] env[62383]: DEBUG oslo_concurrency.lockutils [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lock "f28beb17-8455-49d3-8be0-7636b9abe4e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.246737] env[62383]: DEBUG oslo_concurrency.lockutils [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lock "f28beb17-8455-49d3-8be0-7636b9abe4e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.251164] env[62383]: DEBUG nova.compute.manager [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 728.257432] env[62383]: INFO nova.compute.manager [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Terminating instance [ 728.262542] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 728.262542] env[62383]: value = "task-2451478" [ 728.262542] env[62383]: _type = "Task" [ 728.262542] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.270452] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451477, 'name': ExtendVirtualDisk_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.278378] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451478, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.384966] env[62383]: DEBUG nova.virt.hardware [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 728.384966] env[62383]: DEBUG nova.virt.hardware [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.384966] env[62383]: DEBUG nova.virt.hardware [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 728.384966] env[62383]: DEBUG nova.virt.hardware [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.385438] env[62383]: DEBUG nova.virt.hardware [None 
req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 728.385438] env[62383]: DEBUG nova.virt.hardware [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 728.385517] env[62383]: DEBUG nova.virt.hardware [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 728.385726] env[62383]: DEBUG nova.virt.hardware [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 728.386092] env[62383]: DEBUG nova.virt.hardware [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 728.386252] env[62383]: DEBUG nova.virt.hardware [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 728.386387] env[62383]: DEBUG nova.virt.hardware [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 728.387424] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71f2d8e-ca7e-4fe1-a14b-c81ace2d3a9a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.400708] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789af051-16b0-4b33-ba3c-c14913263c51 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.513409] env[62383]: DEBUG nova.compute.manager [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Received event network-vif-plugged-796c3e3e-48f2-4d7f-8f7d-974f792c4426 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 728.513701] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Acquiring lock "9604eadf-a027-46dd-989b-0d4b752f883a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.516771] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Lock "9604eadf-a027-46dd-989b-0d4b752f883a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.516771] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Lock "9604eadf-a027-46dd-989b-0d4b752f883a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.516771] env[62383]: DEBUG nova.compute.manager [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] No waiting events found dispatching network-vif-plugged-796c3e3e-48f2-4d7f-8f7d-974f792c4426 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 728.516771] env[62383]: WARNING nova.compute.manager [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Received unexpected event network-vif-plugged-796c3e3e-48f2-4d7f-8f7d-974f792c4426 for instance with vm_state building and task_state spawning. [ 728.516771] env[62383]: DEBUG nova.compute.manager [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Received event network-changed-796c3e3e-48f2-4d7f-8f7d-974f792c4426 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 728.517331] env[62383]: DEBUG nova.compute.manager [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Refreshing instance network info cache due to event network-changed-796c3e3e-48f2-4d7f-8f7d-974f792c4426. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 728.517331] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Acquiring lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.517331] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Acquired lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.517331] env[62383]: DEBUG nova.network.neutron [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Refreshing network info cache for port 796c3e3e-48f2-4d7f-8f7d-974f792c4426 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 728.610317] env[62383]: DEBUG nova.compute.manager [None req-eb992ca3-beb4-47a3-b0d4-a1fb3251b7ff tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Found 2 images (rotation: 2) {{(pid=62383) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 728.633365] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df94f96-8e89-47a5-bb0e-d856bb0c9e2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.643767] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e427a1c-7d52-492b-aae8-b9833704da8a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.685756] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f23fad-1089-4e26-bdd0-4cbfddb97165 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.695290] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451476, 'name': ReconfigVM_Task, 'duration_secs': 0.328254} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.697569] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Reconfigured VM instance instance-0000002e to attach disk [datastore2] a10f5b03-c45b-4cc2-923f-3227665d236c/a10f5b03-c45b-4cc2-923f-3227665d236c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 728.698241] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3da4634-95ad-46a1-98ce-7bb284cbbb74 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.700839] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5a2ba7-507c-47d3-af5b-bb7ea0b105da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.716819] env[62383]: DEBUG nova.compute.provider_tree [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 728.720405] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 728.720405] env[62383]: value = "task-2451479" [ 728.720405] env[62383]: _type = "Task" [ 728.720405] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.730455] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451479, 'name': Rename_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.751141] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075912} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.751411] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.752365] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de1afc3-3644-472a-aede-99bf5447a218 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.767186] env[62383]: DEBUG nova.compute.manager [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 728.767384] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 728.776904] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 563840a8-8fa7-4bfa-9912-933c14e7076a/563840a8-8fa7-4bfa-9912-933c14e7076a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.780237] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab47003-59d0-448b-8bd7-7ea1e2f8ae45 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.783075] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0bf70f0-0658-456f-84b0-320b57af295e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.809066] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 728.813137] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73205286-0a0b-48b4-86e9-8b5cdacec679 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.814826] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 728.814826] env[62383]: value = "task-2451480" [ 728.814826] env[62383]: _type = "Task" [ 728.814826] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.815371] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451478, 'name': ReconfigVM_Task, 'duration_secs': 0.304896} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.815450] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 9604eadf-a027-46dd-989b-0d4b752f883a/9604eadf-a027-46dd-989b-0d4b752f883a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 728.819304] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e025f71-c53a-419c-849d-c14033b3535c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.821924] env[62383]: DEBUG oslo_vmware.api [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for the task: (returnval){ [ 728.821924] env[62383]: value = "task-2451481" [ 728.821924] env[62383]: _type = "Task" [ 728.821924] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.830802] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451480, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.835260] env[62383]: DEBUG oslo_vmware.api [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451481, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.835561] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 728.835561] env[62383]: value = "task-2451482" [ 728.835561] env[62383]: _type = "Task" [ 728.835561] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.847911] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451482, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.860224] env[62383]: DEBUG nova.network.neutron [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Successfully updated port: 5773169e-f9fe-4180-8237-10b88641ce09 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 728.889713] env[62383]: DEBUG nova.network.neutron [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Updating instance_info_cache with network_info: [{"id": "73ec5d4d-c675-4804-a31f-e92bdc8286fd", "address": "fa:16:3e:69:c0:d4", "network": {"id": "a6cf92ab-f9be-4021-bd8d-9e4b246d8dc1", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1324527743-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "393ba5b56857422eaee92696b56dc23d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73ec5d4d-c6", "ovs_interfaceid": "73ec5d4d-c675-4804-a31f-e92bdc8286fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.991913] env[62383]: DEBUG nova.network.neutron [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Updated VIF entry in instance network info cache for port 9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 728.992317] env[62383]: DEBUG nova.network.neutron [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Updating instance_info_cache with network_info: [{"id": "9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4", "address": "fa:16:3e:15:4a:e6", "network": {"id": "024ab25b-2360-4443-a441-3172fd2cf74b", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-183456731-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e676769a71c843b6966b648ef3525fee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0467beaa-08c6-44d6-b8a2-e9c609c21ff4", "external-id": "nsx-vlan-transportzone-540", "segmentation_id": 540, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ff3b35c-c2", "ovs_interfaceid": "9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.223849] env[62383]: DEBUG nova.network.neutron [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updated VIF entry in instance network info cache for port 796c3e3e-48f2-4d7f-8f7d-974f792c4426. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 729.224537] env[62383]: DEBUG nova.network.neutron [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance_info_cache with network_info: [{"id": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "address": "fa:16:3e:9b:eb:a7", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap796c3e3e-48", "ovs_interfaceid": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.239172] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451479, 'name': Rename_Task, 'duration_secs': 0.166909} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.240525] env[62383]: ERROR nova.scheduler.client.report [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [req-939c1d1d-986f-494d-bfbc-397247cd81a1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-939c1d1d-986f-494d-bfbc-397247cd81a1"}]} [ 729.241105] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.243708] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d14fe99e-799f-457e-8b67-914d470b4413 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.253686] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 729.253686] env[62383]: value = "task-2451483" [ 729.253686] env[62383]: _type = "Task" [ 729.253686] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.262413] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451483, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.264231] env[62383]: DEBUG nova.scheduler.client.report [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 729.281976] env[62383]: DEBUG nova.scheduler.client.report [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 729.282597] env[62383]: DEBUG nova.compute.provider_tree [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 729.297763] env[62383]: DEBUG nova.scheduler.client.report [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 729.321806] env[62383]: DEBUG nova.scheduler.client.report [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 729.329450] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451480, 'name': ReconfigVM_Task, 'duration_secs': 0.38561} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.329783] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 563840a8-8fa7-4bfa-9912-933c14e7076a/563840a8-8fa7-4bfa-9912-933c14e7076a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 729.330841] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6119781b-751f-4611-9b68-ccdacd9f9765 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.336085] env[62383]: DEBUG oslo_vmware.api [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451481, 'name': PowerOffVM_Task, 'duration_secs': 0.220876} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.339219] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 729.339746] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 729.340687] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-348d0701-7960-450d-8aa7-83039513dfcf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.342174] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 729.342174] env[62383]: value = "task-2451484" [ 729.342174] env[62383]: _type = "Task" [ 729.342174] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.350536] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451482, 'name': Rename_Task, 'duration_secs': 0.162249} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.351339] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.351700] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9dcf1ac3-769e-4300-b289-5b07d96b39cb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.360448] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "refresh_cache-0f48434f-859f-4910-883f-2f81be647bad" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.360698] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "refresh_cache-0f48434f-859f-4910-883f-2f81be647bad" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.360775] env[62383]: DEBUG nova.network.neutron [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 729.362353] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451484, 'name': Rename_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.365714] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 729.365714] env[62383]: value = "task-2451486" [ 729.365714] env[62383]: _type = "Task" [ 729.365714] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.379531] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451486, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.393453] env[62383]: DEBUG oslo_concurrency.lockutils [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Releasing lock "refresh_cache-583138d1-f928-4e33-a443-11c627203c44" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.436695] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 729.437128] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 729.437436] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Deleting the datastore file [datastore2] f28beb17-8455-49d3-8be0-7636b9abe4e8 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 729.438116] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a909190d-e9d4-4ed4-958a-3cc65448f471 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.447804] env[62383]: DEBUG oslo_vmware.api [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for the task: (returnval){ [ 729.447804] env[62383]: value = "task-2451487" [ 729.447804] env[62383]: _type = "Task" [ 729.447804] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.460318] env[62383]: DEBUG oslo_vmware.api [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451487, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.496942] env[62383]: DEBUG oslo_concurrency.lockutils [req-6564adf3-e180-444a-9e25-46c218cd14d1 req-2162b61f-e7b5-4cf0-9840-f7bf5db99dfd service nova] Releasing lock "refresh_cache-8bd05dac-7aa2-44c5-8752-6045c01d213d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.731415] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Releasing lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.731732] env[62383]: DEBUG nova.compute.manager [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Received event network-vif-plugged-1ee3599c-2ed4-4e36-9e36-3446e6178380 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 729.731992] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Acquiring lock "563840a8-8fa7-4bfa-9912-933c14e7076a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.732298] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Lock "563840a8-8fa7-4bfa-9912-933c14e7076a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.732531] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Lock "563840a8-8fa7-4bfa-9912-933c14e7076a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.732763] env[62383]: DEBUG nova.compute.manager [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] No waiting events found dispatching network-vif-plugged-1ee3599c-2ed4-4e36-9e36-3446e6178380 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 729.732996] env[62383]: WARNING nova.compute.manager [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Received unexpected event network-vif-plugged-1ee3599c-2ed4-4e36-9e36-3446e6178380 for instance with vm_state building and task_state spawning. 
[ 729.733257] env[62383]: DEBUG nova.compute.manager [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Received event network-changed-1ee3599c-2ed4-4e36-9e36-3446e6178380 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 729.733578] env[62383]: DEBUG nova.compute.manager [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Refreshing instance network info cache due to event network-changed-1ee3599c-2ed4-4e36-9e36-3446e6178380. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 729.733727] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Acquiring lock "refresh_cache-563840a8-8fa7-4bfa-9912-933c14e7076a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.733922] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Acquired lock "refresh_cache-563840a8-8fa7-4bfa-9912-933c14e7076a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.734163] env[62383]: DEBUG nova.network.neutron [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Refreshing network info cache for port 1ee3599c-2ed4-4e36-9e36-3446e6178380 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 729.767377] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451483, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.856346] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451484, 'name': Rename_Task, 'duration_secs': 0.286359} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.860674] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 729.861463] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02c6160e-27ea-4241-8826-4eebf55a5ef6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.873640] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 729.873640] env[62383]: value = "task-2451488" [ 729.873640] env[62383]: _type = "Task" [ 729.873640] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.885568] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451486, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.892459] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451488, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.898985] env[62383]: DEBUG nova.compute.manager [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 729.899751] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0641e81f-8db3-4fc8-a14d-c996d701e7a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.902864] env[62383]: DEBUG nova.network.neutron [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.906507] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34394136-6d12-4bf5-995b-d756c9276f28 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.920350] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5fa898-e5c6-41fe-98bb-7c537d9c589a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.969331] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fecd6e8-d25a-4b00-970c-5c21df3a3326 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.984037] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd732df-1584-44ce-9ce1-358455470254 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.986738] env[62383]: DEBUG oslo_vmware.api [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Task: {'id': task-2451487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.29855} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.987649] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.987846] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 729.988037] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.988216] env[62383]: INFO nova.compute.manager [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Took 1.22 seconds to destroy the instance on the hypervisor. [ 729.991010] env[62383]: DEBUG oslo.service.loopingcall [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.991010] env[62383]: DEBUG nova.compute.manager [-] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 729.991010] env[62383]: DEBUG nova.network.neutron [-] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 729.999768] env[62383]: DEBUG nova.compute.provider_tree [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 730.078112] env[62383]: DEBUG nova.network.neutron [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Updating instance_info_cache with network_info: [{"id": "5773169e-f9fe-4180-8237-10b88641ce09", "address": "fa:16:3e:05:73:5f", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": 
[], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5773169e-f9", "ovs_interfaceid": "5773169e-f9fe-4180-8237-10b88641ce09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.265876] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451483, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.377750] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451486, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.385858] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451488, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.466195] env[62383]: DEBUG nova.network.neutron [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Updated VIF entry in instance network info cache for port 1ee3599c-2ed4-4e36-9e36-3446e6178380. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 730.466571] env[62383]: DEBUG nova.network.neutron [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Updating instance_info_cache with network_info: [{"id": "1ee3599c-2ed4-4e36-9e36-3446e6178380", "address": "fa:16:3e:3e:ad:97", "network": {"id": "3ae77c57-8c24-4aba-943f-e5e0c2471a86", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-404472524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b77864c75943b4a625276225c3aac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ee3599c-2e", "ovs_interfaceid": "1ee3599c-2ed4-4e36-9e36-3446e6178380", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.475638] env[62383]: DEBUG nova.compute.manager [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.476630] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce471bf-ec1f-421b-a5b0-31d28b567599 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.545377] env[62383]: DEBUG nova.scheduler.client.report [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 730.548264] env[62383]: DEBUG nova.compute.provider_tree [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 72 to 73 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 730.548264] env[62383]: DEBUG nova.compute.provider_tree [None 
req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 730.582434] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "refresh_cache-0f48434f-859f-4910-883f-2f81be647bad" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.582434] env[62383]: DEBUG nova.compute.manager [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Instance network_info: |[{"id": "5773169e-f9fe-4180-8237-10b88641ce09", "address": "fa:16:3e:05:73:5f", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5773169e-f9", "ovs_interfaceid": "5773169e-f9fe-4180-8237-10b88641ce09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 730.582734] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:73:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5773169e-f9fe-4180-8237-10b88641ce09', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.589111] env[62383]: DEBUG oslo.service.loopingcall [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 730.589477] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 730.589606] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39281e0e-e1d9-4258-8201-e16fb7a7072f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.612044] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.612044] env[62383]: value = "task-2451489" [ 730.612044] env[62383]: _type = "Task" [ 730.612044] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.621461] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451489, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.766777] env[62383]: DEBUG oslo_vmware.api [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451483, 'name': PowerOnVM_Task, 'duration_secs': 1.475009} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.767164] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 730.767508] env[62383]: INFO nova.compute.manager [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Took 12.82 seconds to spawn the instance on the hypervisor. [ 730.767995] env[62383]: DEBUG nova.compute.manager [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.769229] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eaed7c1-50fe-44ae-98d4-2e88ca27fa2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.878667] env[62383]: DEBUG oslo_vmware.api [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451486, 'name': PowerOnVM_Task, 'duration_secs': 1.438416} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.882598] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 730.882852] env[62383]: INFO nova.compute.manager [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Took 10.08 seconds to spawn the instance on the hypervisor. [ 730.883061] env[62383]: DEBUG nova.compute.manager [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.885079] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69ca488-c1a2-4582-8e00-dd58d35d9a54 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.888778] env[62383]: DEBUG nova.compute.manager [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Received event network-vif-plugged-5773169e-f9fe-4180-8237-10b88641ce09 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 730.888988] env[62383]: DEBUG oslo_concurrency.lockutils [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] Acquiring lock "0f48434f-859f-4910-883f-2f81be647bad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.889221] env[62383]: DEBUG oslo_concurrency.lockutils [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] Lock "0f48434f-859f-4910-883f-2f81be647bad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.889393] env[62383]: DEBUG oslo_concurrency.lockutils [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] Lock "0f48434f-859f-4910-883f-2f81be647bad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.889558] env[62383]: DEBUG nova.compute.manager [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] No waiting events found dispatching network-vif-plugged-5773169e-f9fe-4180-8237-10b88641ce09 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 730.889721] env[62383]: WARNING nova.compute.manager [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Received unexpected event 
network-vif-plugged-5773169e-f9fe-4180-8237-10b88641ce09 for instance with vm_state building and task_state spawning. [ 730.889909] env[62383]: DEBUG nova.compute.manager [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Received event network-changed-5773169e-f9fe-4180-8237-10b88641ce09 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 730.890098] env[62383]: DEBUG nova.compute.manager [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Refreshing instance network info cache due to event network-changed-5773169e-f9fe-4180-8237-10b88641ce09. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 730.890303] env[62383]: DEBUG oslo_concurrency.lockutils [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] Acquiring lock "refresh_cache-0f48434f-859f-4910-883f-2f81be647bad" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.890414] env[62383]: DEBUG oslo_concurrency.lockutils [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] Acquired lock "refresh_cache-0f48434f-859f-4910-883f-2f81be647bad" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.890903] env[62383]: DEBUG nova.network.neutron [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Refreshing network info cache for port 5773169e-f9fe-4180-8237-10b88641ce09 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 730.898331] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451488, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.921416] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c0589d-f48c-4c13-b24e-1080e97a4f78 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.928854] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Doing hard reboot of VM {{(pid=62383) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 730.929034] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-b1b6a64f-8b83-42e8-8a69-86cf21d2dd0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.935968] env[62383]: DEBUG oslo_vmware.api [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 730.935968] env[62383]: value = "task-2451490" [ 730.935968] env[62383]: _type = "Task" [ 730.935968] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.948380] env[62383]: DEBUG oslo_vmware.api [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451490, 'name': ResetVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.964434] env[62383]: DEBUG nova.compute.manager [req-6abb9858-268a-484f-8424-40f762a7908f req-cd565dd2-5d0c-4b25-8b1a-16be3555162d service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Received event network-vif-deleted-99fff832-18f0-4caa-85b2-428c5e2852a9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 730.965109] env[62383]: INFO nova.compute.manager [req-6abb9858-268a-484f-8424-40f762a7908f req-cd565dd2-5d0c-4b25-8b1a-16be3555162d service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Neutron deleted interface 99fff832-18f0-4caa-85b2-428c5e2852a9; detaching it from the instance and deleting it from the info cache [ 730.965109] env[62383]: DEBUG nova.network.neutron [req-6abb9858-268a-484f-8424-40f762a7908f req-cd565dd2-5d0c-4b25-8b1a-16be3555162d service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.970580] env[62383]: DEBUG oslo_concurrency.lockutils [req-e2fd72f9-c44a-4996-845f-59f98143cc0f req-3fd4f58b-5951-4c13-a68a-58a39dc57982 service nova] Releasing lock "refresh_cache-563840a8-8fa7-4bfa-9912-933c14e7076a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.992859] env[62383]: INFO nova.compute.manager [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] instance snapshotting [ 730.993239] env[62383]: DEBUG nova.objects.instance [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'flavor' on Instance uuid 8a165d96-f503-4bc5-bff4-e6a85201e137 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 731.051350] env[62383]: DEBUG oslo_concurrency.lockutils [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.814s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.053929] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.713s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.055438] env[62383]: INFO nova.compute.claims [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 
3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 731.078689] env[62383]: INFO nova.scheduler.client.report [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Deleted allocations for instance 67d41910-54e1-48f1-b0d3-f34a62595ef2 [ 731.121293] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451489, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.205196] env[62383]: DEBUG nova.network.neutron [-] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.291875] env[62383]: INFO nova.compute.manager [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Took 57.19 seconds to build instance. [ 731.388963] env[62383]: DEBUG oslo_vmware.api [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451488, 'name': PowerOnVM_Task, 'duration_secs': 1.373175} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.389330] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 731.389663] env[62383]: INFO nova.compute.manager [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Took 7.92 seconds to spawn the instance on the hypervisor. [ 731.389865] env[62383]: DEBUG nova.compute.manager [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 731.390674] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568104ec-d024-412f-8530-d3da7de696be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.412021] env[62383]: INFO nova.compute.manager [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Took 50.76 seconds to build instance. [ 731.449539] env[62383]: DEBUG oslo_vmware.api [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451490, 'name': ResetVM_Task, 'duration_secs': 0.103722} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.450066] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Did hard reboot of VM {{(pid=62383) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 731.450146] env[62383]: DEBUG nova.compute.manager [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 731.450980] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c55a05-64cf-4373-a69c-77e51159e901 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.468681] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a6718174-02cd-4efa-b626-2b9232dd306b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.477676] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d967cd-7d96-45f6-8940-6a63fe0ef757 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.500357] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68146c25-1cb5-4b1a-bd17-4a8c809e874f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.519095] env[62383]: DEBUG nova.compute.manager [req-6abb9858-268a-484f-8424-40f762a7908f req-cd565dd2-5d0c-4b25-8b1a-16be3555162d service nova] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Detach interface failed, port_id=99fff832-18f0-4caa-85b2-428c5e2852a9, reason: Instance f28beb17-8455-49d3-8be0-7636b9abe4e8 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 731.534953] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf79184c-fcf4-4358-9e9f-472bc501f9f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.586064] env[62383]: DEBUG oslo_concurrency.lockutils [None req-18c86fd2-e9c7-4aa3-aafd-b6df32f7de3c tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "67d41910-54e1-48f1-b0d3-f34a62595ef2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.810s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.628993] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451489, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.708285] env[62383]: INFO nova.compute.manager [-] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Took 1.72 seconds to deallocate network for instance. 
[ 731.721392] env[62383]: DEBUG nova.network.neutron [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Updated VIF entry in instance network info cache for port 5773169e-f9fe-4180-8237-10b88641ce09. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 731.721502] env[62383]: DEBUG nova.network.neutron [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Updating instance_info_cache with network_info: [{"id": "5773169e-f9fe-4180-8237-10b88641ce09", "address": "fa:16:3e:05:73:5f", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5773169e-f9", "ovs_interfaceid": "5773169e-f9fe-4180-8237-10b88641ce09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.793494] env[62383]: DEBUG oslo_concurrency.lockutils [None req-880e9d85-f4e6-4529-8ef6-195e469d1ca7 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "a10f5b03-c45b-4cc2-923f-3227665d236c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.426s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.913023] env[62383]: INFO nova.compute.manager [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Took 50.02 seconds to build instance. 
[ 731.914025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-379faf6d-972f-4f22-b899-d5935887a1ef tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "9604eadf-a027-46dd-989b-0d4b752f883a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.006s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.963449] env[62383]: DEBUG oslo_concurrency.lockutils [None req-587ef6cf-1b08-411d-a2d9-5964e997fe06 tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "583138d1-f928-4e33-a443-11c627203c44" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.574s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.045557] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 732.045904] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-271676df-81be-4ae7-b75b-42ce06fd7066 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.053613] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 732.053613] env[62383]: value = "task-2451491" [ 732.053613] env[62383]: _type = "Task" [ 732.053613] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.066429] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451491, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.123224] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451489, 'name': CreateVM_Task, 'duration_secs': 1.470628} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.125835] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 732.126693] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.126790] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.127140] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 732.127388] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b583f02-7670-4b09-b1ec-148929112790 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.133563] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 732.133563] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b5fa88-06f5-0fc6-f1ea-a2f9c818a08d" [ 732.133563] env[62383]: _type = "Task" [ 732.133563] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.142826] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b5fa88-06f5-0fc6-f1ea-a2f9c818a08d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.215174] env[62383]: DEBUG oslo_concurrency.lockutils [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.224239] env[62383]: DEBUG oslo_concurrency.lockutils [req-d3c012d8-b74d-4b95-96d1-4ebe62c0dbc7 req-231e3bbc-c360-49dd-9b6c-64b5f5d5630d service nova] Releasing lock "refresh_cache-0f48434f-859f-4910-883f-2f81be647bad" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.298266] env[62383]: DEBUG nova.compute.manager [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 732.404083] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] Acquiring lock "refresh_cache-a10f5b03-c45b-4cc2-923f-3227665d236c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.404275] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] Acquired lock "refresh_cache-a10f5b03-c45b-4cc2-923f-3227665d236c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.404485] env[62383]: DEBUG nova.network.neutron [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 732.414936] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9cb833c8-07b8-46c2-88d8-1c91c09a2587 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "563840a8-8fa7-4bfa-9912-933c14e7076a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.324s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.422221] env[62383]: DEBUG nova.compute.manager [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 732.570588] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451491, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.651814] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b5fa88-06f5-0fc6-f1ea-a2f9c818a08d, 'name': SearchDatastore_Task, 'duration_secs': 0.010163} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.651814] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.652174] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 732.652497] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.652759] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.653051] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.653409] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0612460e-ab78-4afd-ac9d-10e01e7aa6fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.663694] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.664045] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 732.665256] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d25e553b-6d32-4061-ad89-a2d22e426edf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.672114] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 732.672114] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]521797e7-2771-ae32-1f78-ef1724ae93b5" [ 732.672114] env[62383]: _type = "Task" [ 732.672114] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.682465] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521797e7-2771-ae32-1f78-ef1724ae93b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.710659] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0b93c7-1705-4284-b2f8-181a3975fc58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.720718] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc63edd2-0881-4a60-b4cc-6803b7304aac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.757817] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe55c7d-32c4-4fb8-a642-6bc2c1ce6d35 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.766288] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a1ecb8-c484-432d-823a-c2829eba2c87 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.780917] env[62383]: DEBUG nova.compute.provider_tree [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.821285] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.928703] env[62383]: DEBUG nova.compute.manager [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 732.950048] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.064984] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451491, 'name': CreateSnapshot_Task, 'duration_secs': 0.67576} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.067797] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 733.068857] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891225cb-8525-4853-88e3-368f5fa54685 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.182659] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521797e7-2771-ae32-1f78-ef1724ae93b5, 'name': SearchDatastore_Task, 'duration_secs': 0.011972} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.183484] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b58cac5b-469d-4a6a-889e-bc7b5152def3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.188398] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 733.188398] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5211decd-22c9-b081-a2ae-0d69f112c5d0" [ 733.188398] env[62383]: _type = "Task" [ 733.188398] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.196110] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5211decd-22c9-b081-a2ae-0d69f112c5d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.285230] env[62383]: DEBUG nova.scheduler.client.report [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 733.359594] env[62383]: DEBUG nova.network.neutron [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Updating instance_info_cache with network_info: [{"id": "45641616-950c-40e9-8a0f-76d8fd08bc82", "address": "fa:16:3e:0c:24:4a", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45641616-95", "ovs_interfaceid": "45641616-950c-40e9-8a0f-76d8fd08bc82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.362255] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.362468] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.362807] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring 
lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.362912] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.363068] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.365013] env[62383]: INFO nova.compute.manager [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Terminating instance [ 733.453897] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.586761] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 733.587087] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-35ec6075-1449-4c5e-a97f-0c23b6e53757 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.597284] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 733.597284] env[62383]: value = "task-2451492" [ 733.597284] env[62383]: _type = "Task" [ 733.597284] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.606043] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451492, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.698973] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5211decd-22c9-b081-a2ae-0d69f112c5d0, 'name': SearchDatastore_Task, 'duration_secs': 0.053363} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.699532] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.699632] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 0f48434f-859f-4910-883f-2f81be647bad/0f48434f-859f-4910-883f-2f81be647bad.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 733.700186] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0f0b9b3-b1fa-4d8c-b8cf-185ef0e9ca6b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.708040] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 733.708040] env[62383]: value = "task-2451493" [ 733.708040] env[62383]: _type = "Task" [ 733.708040] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.716189] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451493, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.791271] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.737s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.791271] env[62383]: DEBUG nova.compute.manager [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 733.793852] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.045s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.794115] env[62383]: DEBUG nova.objects.instance [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lazy-loading 'resources' on Instance uuid a16193af-410e-4bf6-bb06-a97791cf6060 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 733.863385] env[62383]: DEBUG oslo_concurrency.lockutils [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] Releasing lock "refresh_cache-a10f5b03-c45b-4cc2-923f-3227665d236c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.863615] env[62383]: DEBUG nova.compute.manager [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Inject network info {{(pid=62383) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 733.863924] env[62383]: DEBUG nova.compute.manager [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] network_info to inject: |[{"id": "45641616-950c-40e9-8a0f-76d8fd08bc82", "address": "fa:16:3e:0c:24:4a", "network": {"id": "67c7c479-ddf0-4e8f-8f8f-2d0be4c2da7d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1342039942-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e20c8351a13a427db4fccbac7108c205", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45641616-95", "ovs_interfaceid": "45641616-950c-40e9-8a0f-76d8fd08bc82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 733.870494] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Reconfiguring VM instance to set the machine id {{(pid=62383) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 733.871680] env[62383]: DEBUG nova.compute.manager [None 
req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 733.871962] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 733.872305] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7c6dd7f-2db7-4b1f-8eb1-e28415b378e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.885153] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f6c387-72af-4b3d-b84d-da707ec0bac1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.893488] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 733.894856] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b62a24f3-e714-47c8-afd9-b289d72dfcf9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.896594] env[62383]: DEBUG oslo_vmware.api [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] Waiting for the task: (returnval){ [ 733.896594] env[62383]: value = "task-2451494" [ 733.896594] env[62383]: _type = "Task" [ 733.896594] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.901328] env[62383]: DEBUG oslo_vmware.api [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 733.901328] env[62383]: value = "task-2451495" [ 733.901328] env[62383]: _type = "Task" [ 733.901328] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.908426] env[62383]: DEBUG oslo_vmware.api [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] Task: {'id': task-2451494, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.914162] env[62383]: DEBUG oslo_vmware.api [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451495, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.025331] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquiring lock "583138d1-f928-4e33-a443-11c627203c44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.026195] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "583138d1-f928-4e33-a443-11c627203c44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.026195] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquiring lock "583138d1-f928-4e33-a443-11c627203c44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.026820] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "583138d1-f928-4e33-a443-11c627203c44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.026820] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "583138d1-f928-4e33-a443-11c627203c44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.029500] env[62383]: INFO nova.compute.manager [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Terminating instance [ 734.111755] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451492, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.224664] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451493, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.298708] env[62383]: DEBUG nova.compute.utils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 734.303732] env[62383]: DEBUG nova.compute.manager [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Not allocating networking since 'none' was specified. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 734.408666] env[62383]: DEBUG oslo_vmware.api [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] Task: {'id': task-2451494, 'name': ReconfigVM_Task, 'duration_secs': 0.202004} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.411573] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-63b8f7d5-51a8-4fd0-8602-b404c8340534 tempest-ServersAdminTestJSON-696398814 tempest-ServersAdminTestJSON-696398814-project-admin] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Reconfigured VM instance to set the machine id {{(pid=62383) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 734.415995] env[62383]: DEBUG oslo_vmware.api [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451495, 'name': PowerOffVM_Task, 'duration_secs': 0.243797} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.416544] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 734.416544] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 734.416690] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfa897b7-dad9-4e73-8d47-c76bd4b733c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.483440] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 734.483690] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 734.483894] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Deleting the datastore file [datastore1] f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.484170] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-671351da-b580-4248-acb4-b1f9098b380c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.492541] env[62383]: DEBUG oslo_vmware.api [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 734.492541] env[62383]: value = "task-2451497" [ 734.492541] env[62383]: _type = "Task" [ 734.492541] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.505012] env[62383]: DEBUG oslo_vmware.api [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451497, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.534666] env[62383]: DEBUG nova.compute.manager [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 734.534888] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 734.536168] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f490e9a7-2b08-4419-8aac-f0e816cb15d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.543837] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 734.544573] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68fc0988-3ef1-46b7-9ebe-99e52ef60005 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.550229] env[62383]: DEBUG oslo_vmware.api [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 734.550229] env[62383]: value = "task-2451498" [ 734.550229] env[62383]: _type = "Task" [ 734.550229] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.558155] env[62383]: DEBUG oslo_vmware.api [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451498, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.608853] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451492, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.718698] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451493, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559806} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.721474] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 0f48434f-859f-4910-883f-2f81be647bad/0f48434f-859f-4910-883f-2f81be647bad.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 734.721703] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 734.722294] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed1db7f4-d6ee-4034-94dc-64d884af9203 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.728347] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 734.728347] env[62383]: value = "task-2451499" [ 734.728347] env[62383]: _type = "Task" [ 734.728347] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.741660] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451499, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.779156] env[62383]: DEBUG nova.compute.manager [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Stashing vm_state: active {{(pid=62383) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 734.804339] env[62383]: DEBUG nova.compute.manager [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 734.892986] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4901dc-9bc7-40f8-b685-4a42ecde9f25 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.901851] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d499093-3be6-4964-87ad-852fcb98da61 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.934206] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca85348d-7dff-4b4f-8e99-64b8117f74ea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.941677] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c9156b-8381-4eef-9b03-a66c1513a00e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.956186] env[62383]: DEBUG nova.compute.provider_tree [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.004668] env[62383]: DEBUG oslo_vmware.api [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184442} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.004927] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 735.005152] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 735.005304] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 735.005477] env[62383]: INFO nova.compute.manager [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 735.005723] env[62383]: DEBUG oslo.service.loopingcall [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 735.006117] env[62383]: DEBUG nova.compute.manager [-] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 735.006117] env[62383]: DEBUG nova.network.neutron [-] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 735.060304] env[62383]: DEBUG oslo_vmware.api [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451498, 'name': PowerOffVM_Task, 'duration_secs': 0.367081} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.060570] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 735.060736] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 735.060993] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f73358af-2af8-4610-9d82-f6069c887bc4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.108563] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451492, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.131812] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 735.132062] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 735.132250] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Deleting the datastore file [datastore1] 583138d1-f928-4e33-a443-11c627203c44 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 735.132531] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-506427aa-8386-4ba9-88f2-873de3a86470 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.139443] env[62383]: DEBUG oslo_vmware.api [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for the task: (returnval){ [ 735.139443] env[62383]: value = "task-2451501" [ 735.139443] env[62383]: _type = "Task" [ 735.139443] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.147794] env[62383]: DEBUG oslo_vmware.api [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.240593] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451499, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062343} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.240926] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 735.242562] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe879f66-064a-4752-aa1f-84c381326949 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.271130] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 0f48434f-859f-4910-883f-2f81be647bad/0f48434f-859f-4910-883f-2f81be647bad.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 735.271607] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcf83afa-99b5-4d44-aafe-77c62a1ace96 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.306014] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 735.306014] env[62383]: value = "task-2451502" [ 735.306014] env[62383]: _type = "Task" [ 735.306014] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.317088] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.321022] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451502, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.459941] env[62383]: DEBUG nova.scheduler.client.report [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 735.514522] env[62383]: DEBUG nova.compute.manager [req-cf614f36-4350-41db-bebe-a70dee9b0923 req-67da0741-37fc-49ae-9b23-df79a827e2fd service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Received event network-vif-deleted-996369ec-24dc-43dd-8380-b1f7a35e6557 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 735.515564] env[62383]: INFO nova.compute.manager [req-cf614f36-4350-41db-bebe-a70dee9b0923 req-67da0741-37fc-49ae-9b23-df79a827e2fd service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Neutron deleted interface 996369ec-24dc-43dd-8380-b1f7a35e6557; detaching it from the instance and deleting it from the info cache [ 735.515564] env[62383]: DEBUG nova.network.neutron [req-cf614f36-4350-41db-bebe-a70dee9b0923 req-67da0741-37fc-49ae-9b23-df79a827e2fd service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.613019] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451492, 'name': CloneVM_Task, 'duration_secs': 1.554646} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.613019] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Created linked-clone VM from snapshot [ 735.613019] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaed967b-0df5-4670-b4dd-f0b0c0f97b97 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.617945] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Uploading image d9e28366-6c3c-4e44-8e74-212ed2c92baa {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 735.645957] env[62383]: DEBUG oslo_vmware.rw_handles [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 735.645957] env[62383]: value = "vm-496454" [ 735.645957] env[62383]: _type = "VirtualMachine" [ 735.645957] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 735.646315] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8903bc56-1aeb-4935-918a-032a4679e887 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.652720] env[62383]: DEBUG oslo_vmware.api [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.653942] env[62383]: DEBUG oslo_vmware.rw_handles [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lease: (returnval){ [ 735.653942] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5232d5c3-b057-e606-98e5-5d1319a5f5ff" [ 735.653942] env[62383]: _type = "HttpNfcLease" [ 735.653942] env[62383]: } obtained for exporting VM: (result){ [ 735.653942] env[62383]: value = "vm-496454" [ 735.653942] env[62383]: _type = "VirtualMachine" [ 735.653942] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 735.654199] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the lease: (returnval){ [ 735.654199] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5232d5c3-b057-e606-98e5-5d1319a5f5ff" [ 735.654199] env[62383]: _type = "HttpNfcLease" [ 735.654199] env[62383]: } to be ready. 
{{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 735.660405] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 735.660405] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5232d5c3-b057-e606-98e5-5d1319a5f5ff" [ 735.660405] env[62383]: _type = "HttpNfcLease" [ 735.660405] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 735.815592] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451502, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.819510] env[62383]: DEBUG nova.compute.manager [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 735.844252] env[62383]: DEBUG nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 735.844644] env[62383]: DEBUG nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.844918] env[62383]: DEBUG nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 735.845255] env[62383]: DEBUG nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 735.845521] env[62383]: DEBUG nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 735.845795] env[62383]: DEBUG 
nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 735.846513] env[62383]: DEBUG nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 735.846513] env[62383]: DEBUG nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 735.846682] env[62383]: DEBUG nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 735.846866] env[62383]: DEBUG nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 735.847066] env[62383]: DEBUG nova.virt.hardware [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 735.847960] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa55c3d-17a9-45ac-a39b-a15a68626c49 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.855910] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f4d6e4-bf0e-4f0f-beb1-63ab4cf1ef39 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.870626] env[62383]: DEBUG nova.network.neutron [-] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.871977] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 735.880446] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Creating folder: Project (450d3c3c078147cea0f34b155566615f). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 735.880446] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6702a6aa-6c48-4396-814b-4f93c7296fdf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.887915] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Created folder: Project (450d3c3c078147cea0f34b155566615f) in parent group-v496304. [ 735.888119] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Creating folder: Instances. Parent ref: group-v496455. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 735.888347] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85ccaa63-b0fa-4599-aa2d-2e7b030e092c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.896918] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Created folder: Instances in parent group-v496455. [ 735.897179] env[62383]: DEBUG oslo.service.loopingcall [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 735.897450] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 735.897573] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-737a5bc6-2b00-4cf4-bff5-21e50fa82845 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.914532] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 735.914532] env[62383]: value = "task-2451506" [ 735.914532] env[62383]: _type = "Task" [ 735.914532] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.922771] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451506, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.967780] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.968223] env[62383]: DEBUG oslo_concurrency.lockutils [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.803s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.969056] env[62383]: DEBUG nova.objects.instance [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lazy-loading 'resources' on Instance uuid e51a0dd7-b5da-44cb-9cd8-62932aec3ad5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 735.997880] env[62383]: INFO nova.scheduler.client.report [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted allocations for instance a16193af-410e-4bf6-bb06-a97791cf6060 [ 736.017382] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8cf622b-370f-444b-87f5-3dc03251d61d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.028906] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d612bf0-c258-49e1-b496-c6c7dc0e4c16 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.068514] env[62383]: DEBUG nova.compute.manager [req-cf614f36-4350-41db-bebe-a70dee9b0923 req-67da0741-37fc-49ae-9b23-df79a827e2fd service nova] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Detach interface failed, port_id=996369ec-24dc-43dd-8380-b1f7a35e6557, reason: Instance f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 736.152270] env[62383]: DEBUG oslo_vmware.api [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Task: {'id': task-2451501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.707404} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.152528] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 736.152723] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 736.152905] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 736.153095] env[62383]: INFO nova.compute.manager [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] [instance: 583138d1-f928-4e33-a443-11c627203c44] Took 1.62 seconds to destroy the instance on the hypervisor. [ 736.153343] env[62383]: DEBUG oslo.service.loopingcall [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 736.153535] env[62383]: DEBUG nova.compute.manager [-] [instance: 583138d1-f928-4e33-a443-11c627203c44] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 736.153630] env[62383]: DEBUG nova.network.neutron [-] [instance: 583138d1-f928-4e33-a443-11c627203c44] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 736.163202] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 736.163202] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5232d5c3-b057-e606-98e5-5d1319a5f5ff" [ 736.163202] env[62383]: _type = "HttpNfcLease" [ 736.163202] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 736.163202] env[62383]: DEBUG oslo_vmware.rw_handles [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 736.163202] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5232d5c3-b057-e606-98e5-5d1319a5f5ff" [ 736.163202] env[62383]: _type = "HttpNfcLease" [ 736.163202] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 736.163699] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10417ef0-f252-481d-b77d-b947b3055a79 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.173772] env[62383]: DEBUG oslo_vmware.rw_handles [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb4c44-7bc8-8536-59aa-0a0e996a4aba/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 736.174052] env[62383]: DEBUG oslo_vmware.rw_handles [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb4c44-7bc8-8536-59aa-0a0e996a4aba/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 736.275216] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-681fd8ae-87f0-4632-b3a7-40db1f926ca7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.316164] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451502, 'name': ReconfigVM_Task, 'duration_secs': 0.760571} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.316164] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 0f48434f-859f-4910-883f-2f81be647bad/0f48434f-859f-4910-883f-2f81be647bad.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 736.316164] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e752290-47b6-4ce7-b85f-93c4385d33ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.322875] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 736.322875] env[62383]: value = "task-2451507" [ 736.322875] env[62383]: _type = "Task" [ 736.322875] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.331254] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451507, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.379441] env[62383]: INFO nova.compute.manager [-] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Took 1.37 seconds to deallocate network for instance. [ 736.425799] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451506, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.513435] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de36d06c-8f33-4347-99ec-86b618f8253d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "a16193af-410e-4bf6-bb06-a97791cf6060" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.316s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.835348] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451507, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.887528] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.932651] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451506, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.014049] env[62383]: DEBUG nova.network.neutron [-] [instance: 583138d1-f928-4e33-a443-11c627203c44] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.106753] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eba231bd-9285-4d36-a102-e3f34d673d3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.115938] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c051244-b22a-4652-9378-d109fc363fcf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.149762] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ecd87e5-2c0a-45ea-9536-a164f3d752d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.162969] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845d386b-1dd9-477b-b468-7c864ac652de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.177795] env[62383]: DEBUG nova.compute.provider_tree [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.336021] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451507, 'name': Rename_Task, 'duration_secs': 1.005572} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.336683] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 737.337062] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52bf11da-29f8-4d5f-bc48-5bba051407b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.343889] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 737.343889] env[62383]: value = "task-2451508" [ 737.343889] env[62383]: _type = "Task" [ 737.343889] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.354956] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451508, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.426327] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451506, 'name': CreateVM_Task, 'duration_secs': 1.291581} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.426582] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 737.427046] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.427245] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.427708] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 737.428031] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18d772c4-fb51-4722-ac8b-5ae28fa38230 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.433381] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for the task: (returnval){ [ 737.433381] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52fae677-19ea-18b3-00ef-df8077426312" [ 737.433381] env[62383]: _type = "Task" [ 737.433381] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.441296] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fae677-19ea-18b3-00ef-df8077426312, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.519159] env[62383]: INFO nova.compute.manager [-] [instance: 583138d1-f928-4e33-a443-11c627203c44] Took 1.37 seconds to deallocate network for instance. 
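[Editorial note] The entries above repeat one pattern over and over: an oslo_vmware call returns a vSphere task reference (e.g. task-2451506 for CreateVM_Task or task-2451508 for PowerOnVM_Task), and the API layer then polls it, logging "progress is N%." until the task reports "completed successfully." The sketch below is only a minimal, self-contained illustration of that poll loop; poll_task, TaskInfo and fake_task_info are hypothetical stand-ins invented for this example and are not the oslo.vmware implementation that produced these log lines.

```python
import time
from dataclasses import dataclass


# Hypothetical stand-in for the vSphere TaskInfo object; the real code in
# these logs retrieves task state through oslo.vmware's session layer.
@dataclass
class TaskInfo:
    state: str              # "running", "success" or "error"
    progress: int           # 0-100
    error: str | None = None


def poll_task(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll a task until it succeeds, mirroring the 'progress is N%.' lines."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        print(f"Task: {task_id} progress is {info.progress}%.")
        if info.state == "success":
            print(f"Task: {task_id} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")


# Usage with a fake task that finishes after a few polls.
if __name__ == "__main__":
    _progress = {"value": 0}

    def fake_task_info(task_id):
        _progress["value"] = min(100, _progress["value"] + 33)
        state = "success" if _progress["value"] >= 100 else "running"
        return TaskInfo(state=state, progress=_progress["value"])

    poll_task(fake_task_info, "task-2451506", interval=0.01)
```

In the logs themselves this loop runs inside the service's event loop rather than blocking, which is why several tasks (CreateVM_Task, Rename_Task, SearchDatastore_Task) are seen advancing in interleaved progress lines.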
[ 737.621563] env[62383]: DEBUG nova.compute.manager [req-4a045888-0c45-4497-b05a-0bbd9b2db5ac req-38a39fc8-f807-4745-ba1a-33bc40e2345b service nova] [instance: 583138d1-f928-4e33-a443-11c627203c44] Received event network-vif-deleted-73ec5d4d-c675-4804-a31f-e92bdc8286fd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 737.663590] env[62383]: DEBUG nova.compute.manager [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 737.664492] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0608ff6d-cb43-46e1-baec-d1daa2508d41 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.682406] env[62383]: DEBUG nova.scheduler.client.report [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 737.857289] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451508, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.947320] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fae677-19ea-18b3-00ef-df8077426312, 'name': SearchDatastore_Task, 'duration_secs': 0.016971} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.947997] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.948409] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.948763] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.948980] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.949217] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.949577] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b48a3357-f8f5-418d-bc60-fe5a5291fb23 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.958137] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.958419] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.959282] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59bc6a20-9a57-407b-a3de-9700cd3012f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.965495] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for the task: (returnval){ [ 737.965495] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52718080-e281-2b63-1d78-aef9c73a1adb" [ 737.965495] env[62383]: _type = "Task" [ 737.965495] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.974280] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52718080-e281-2b63-1d78-aef9c73a1adb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.019281] env[62383]: INFO nova.compute.manager [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Rebuilding instance [ 738.026683] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.062675] env[62383]: DEBUG nova.compute.manager [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 738.063845] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6dbfd9-0daa-4283-ad2e-3e0a267ac2d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.179626] env[62383]: INFO nova.compute.manager [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] instance snapshotting [ 738.183125] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9a5594-29fb-4059-8685-b8660142c6c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.187742] env[62383]: DEBUG oslo_concurrency.lockutils [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.220s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.204649] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.439s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.204907] env[62383]: DEBUG nova.objects.instance [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lazy-loading 'resources' on Instance uuid 14bb9b79-d224-4a64-861e-30dd919c5741 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 738.207148] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b56fab7-e4fe-4e49-857e-18838f2f2384 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.221244] env[62383]: INFO nova.scheduler.client.report [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Deleted allocations for instance e51a0dd7-b5da-44cb-9cd8-62932aec3ad5 [ 738.354626] env[62383]: DEBUG oslo_vmware.api [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451508, 'name': PowerOnVM_Task, 'duration_secs': 0.661899} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.355691] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 738.355691] env[62383]: INFO nova.compute.manager [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Took 10.10 seconds to spawn the instance on the hypervisor. [ 738.355691] env[62383]: DEBUG nova.compute.manager [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 738.356410] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e77f100-e1b6-44b6-ac0d-49ef1dd39913 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.476629] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52718080-e281-2b63-1d78-aef9c73a1adb, 'name': SearchDatastore_Task, 'duration_secs': 0.01148} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.477395] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9846a512-2d33-4433-bbc8-bbda37df6c93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.482544] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for the task: (returnval){ [ 738.482544] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5298b52c-b9b2-6ca1-01a0-8808b76a3b5f" [ 738.482544] env[62383]: _type = "Task" [ 738.482544] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.490507] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5298b52c-b9b2-6ca1-01a0-8808b76a3b5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.718522] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 738.719690] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7617e6b1-8a6f-4dd7-8c1f-7346839c2ba8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.729353] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 738.729353] env[62383]: value = "task-2451509" [ 738.729353] env[62383]: _type = "Task" [ 738.729353] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.729710] env[62383]: DEBUG oslo_concurrency.lockutils [None req-08592c0a-577c-4ec4-92df-0f71a76c194b tempest-ServersTestManualDisk-1778785197 tempest-ServersTestManualDisk-1778785197-project-member] Lock "e51a0dd7-b5da-44cb-9cd8-62932aec3ad5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.758s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.742264] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451509, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.876727] env[62383]: INFO nova.compute.manager [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Took 48.22 seconds to build instance. [ 738.994831] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5298b52c-b9b2-6ca1-01a0-8808b76a3b5f, 'name': SearchDatastore_Task, 'duration_secs': 0.01446} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.997703] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.998939] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9/3f508af0-68a2-4898-b9ae-d84cdb8a4cd9.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 738.998939] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47c6c643-cfa0-4e68-b52f-d03bb741e490 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.007892] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for the task: (returnval){ [ 739.007892] env[62383]: value = "task-2451510" [ 739.007892] env[62383]: _type = "Task" [ 739.007892] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.019759] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451510, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.081068] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.082411] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c02143b4-aa9f-48bc-b894-bac0b0ebbe66 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.091818] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 739.091818] env[62383]: value = "task-2451511" [ 739.091818] env[62383]: _type = "Task" [ 739.091818] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.102469] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451511, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.245375] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451509, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.327435] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a929fbe-0318-400c-8fe0-86ab9df82a4f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.341197] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3578c03-0b28-4e67-8ad3-b0976f36367c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.372889] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e416861-03c7-474f-8b42-0ccc55cc252e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.380066] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a6806f3b-cdb3-4c4c-a3d8-7b57c9c5de82 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "0f48434f-859f-4910-883f-2f81be647bad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.624s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.384022] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282cf191-3694-4ad4-9961-1dac194df9c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.398648] env[62383]: DEBUG nova.compute.provider_tree [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.522694] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451510, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.602894] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451511, 'name': PowerOffVM_Task, 'duration_secs': 0.268278} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.603244] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 739.603586] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 739.604806] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e03014-28ff-4fa9-b827-e6c225e3d0f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.613123] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 739.613409] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-704da9b8-b806-4887-9046-a3f3b935776c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.693818] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 739.694071] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 739.694225] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleting the datastore file [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 739.694510] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fafe5b02-b69f-4745-98cc-bfd6a5bbccd8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.701922] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 739.701922] env[62383]: value = "task-2451513" [ 739.701922] env[62383]: _type = "Task" [ 739.701922] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.710415] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451513, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.742043] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451509, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.888469] env[62383]: DEBUG nova.compute.manager [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 739.902300] env[62383]: DEBUG nova.scheduler.client.report [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 740.021317] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451510, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624387} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.021317] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9/3f508af0-68a2-4898-b9ae-d84cdb8a4cd9.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 740.021317] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 740.021317] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-195b9630-10d9-41ef-bf2a-a7b732949407 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.026481] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for the task: (returnval){ [ 740.026481] env[62383]: value = "task-2451514" [ 740.026481] env[62383]: _type = "Task" [ 740.026481] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.034780] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451514, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.217438] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451513, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35539} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.217666] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 740.217858] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 740.218056] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 740.241293] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451509, 'name': CreateSnapshot_Task, 'duration_secs': 1.195319} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.241593] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 740.242373] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e87e8fc-bdec-4cf7-989a-312b0070d9da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.401744] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "0f48434f-859f-4910-883f-2f81be647bad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.401988] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "0f48434f-859f-4910-883f-2f81be647bad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.402553] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "0f48434f-859f-4910-883f-2f81be647bad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.402751] 
env[62383]: DEBUG oslo_concurrency.lockutils [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "0f48434f-859f-4910-883f-2f81be647bad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.402923] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "0f48434f-859f-4910-883f-2f81be647bad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.404687] env[62383]: INFO nova.compute.manager [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Terminating instance [ 740.408422] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.204s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.410942] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.272s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.412733] env[62383]: INFO nova.compute.claims [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.419135] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.437197] env[62383]: INFO nova.scheduler.client.report [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Deleted allocations for instance 14bb9b79-d224-4a64-861e-30dd919c5741 [ 740.539207] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451514, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081836} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.539345] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 740.540214] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94602d3e-be99-4f71-8b11-e21dfa71eda6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.561151] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9/3f508af0-68a2-4898-b9ae-d84cdb8a4cd9.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.561922] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e82662af-e4a5-4ec2-a6b8-967926100be0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.585895] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for the task: (returnval){ [ 740.585895] env[62383]: value = "task-2451515" [ 740.585895] env[62383]: _type = "Task" [ 740.585895] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.597245] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451515, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.760964] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 740.762366] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-27d42282-49bd-4af8-8fb1-de6ed788dad5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.774179] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 740.774179] env[62383]: value = "task-2451516" [ 740.774179] env[62383]: _type = "Task" [ 740.774179] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.782779] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451516, 'name': CloneVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.913099] env[62383]: DEBUG nova.compute.manager [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 740.913099] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 740.915069] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49061056-68b8-4d2c-9b7f-a6690f532f51 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.924695] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 740.925096] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-117dd620-3e07-4d54-93a3-d4a0ce149567 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.931333] env[62383]: DEBUG oslo_vmware.api [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 740.931333] env[62383]: value = "task-2451517" [ 740.931333] env[62383]: _type = "Task" [ 740.931333] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.941787] env[62383]: DEBUG oslo_vmware.api [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451517, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.945773] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ba07d8e-39a6-4ed2-b991-b9aa9eebea61 tempest-VolumesAssistedSnapshotsTest-1080832521 tempest-VolumesAssistedSnapshotsTest-1080832521-project-member] Lock "14bb9b79-d224-4a64-861e-30dd919c5741" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 42.656s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.100459] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451515, 'name': ReconfigVM_Task, 'duration_secs': 0.446179} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.100825] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9/3f508af0-68a2-4898-b9ae-d84cdb8a4cd9.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 741.101766] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9382f3ec-d703-4879-bc8e-60183b381fc9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.111143] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for the task: (returnval){ [ 741.111143] env[62383]: value = "task-2451518" [ 741.111143] env[62383]: _type = "Task" [ 741.111143] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.121505] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451518, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.261098] env[62383]: DEBUG nova.virt.hardware [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 741.261098] env[62383]: DEBUG nova.virt.hardware [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.261618] env[62383]: DEBUG nova.virt.hardware [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 741.261931] env[62383]: DEBUG nova.virt.hardware [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.262293] env[62383]: DEBUG nova.virt.hardware [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 741.262657] env[62383]: DEBUG nova.virt.hardware [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 741.265173] env[62383]: DEBUG nova.virt.hardware [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 741.265173] env[62383]: DEBUG nova.virt.hardware [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 741.265173] env[62383]: DEBUG nova.virt.hardware [None 
req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 741.265173] env[62383]: DEBUG nova.virt.hardware [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 741.265173] env[62383]: DEBUG nova.virt.hardware [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 741.265480] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fb9ba7-50d4-4090-903d-e4fa5ff1ce8e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.281717] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c944a27-5163-4742-a0c5-191efab39da7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.302245] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451516, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.302896] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:cf:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1d30299-f4ce-40b4-9046-fd1d10565fd3', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 741.311884] env[62383]: DEBUG oslo.service.loopingcall [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 741.312303] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 741.312778] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a37b140b-da90-4b8a-bd01-69c6e4c9a2de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.337200] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 741.337200] env[62383]: value = "task-2451519" [ 741.337200] env[62383]: _type = "Task" [ 741.337200] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.346867] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451519, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.444174] env[62383]: DEBUG oslo_vmware.api [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451517, 'name': PowerOffVM_Task, 'duration_secs': 0.233205} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.444588] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 741.444971] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 741.445379] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7024170-7caf-403e-bc54-0a44c72aa562 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.521447] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 741.521755] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 741.521966] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleting the datastore file [datastore1] 0f48434f-859f-4910-883f-2f81be647bad {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 741.522603] env[62383]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdca7694-2eac-4ebf-b896-42cb0f1201c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.534112] env[62383]: DEBUG oslo_vmware.api [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 741.534112] env[62383]: value = "task-2451521" [ 741.534112] env[62383]: _type = "Task" [ 741.534112] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.545846] env[62383]: DEBUG oslo_vmware.api [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451521, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.625695] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451518, 'name': Rename_Task, 'duration_secs': 0.154603} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.625979] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.626259] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-658661fb-6d82-4b09-91f2-f51d868a48d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.633161] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for the task: (returnval){ [ 741.633161] env[62383]: value = "task-2451522" [ 741.633161] env[62383]: _type = "Task" [ 741.633161] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.646701] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451522, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.787570] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451516, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.848327] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451519, 'name': CreateVM_Task, 'duration_secs': 0.387117} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.848668] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.849294] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.849545] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.849921] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 741.852942] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da78c19d-11d7-4e03-a15e-05ea7e58921d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.859126] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 741.859126] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5266e0ad-cdba-a83f-40e7-5e1d9f11d836" [ 741.859126] env[62383]: _type = "Task" [ 741.859126] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.869189] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5266e0ad-cdba-a83f-40e7-5e1d9f11d836, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.053639] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7f8ce9-2e60-46f4-a908-3ad9039258e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.061624] env[62383]: DEBUG oslo_vmware.api [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451521, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141509} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.062483] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 742.063142] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 742.063361] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 742.063606] env[62383]: INFO nova.compute.manager [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Took 1.15 seconds to destroy the instance on the hypervisor. [ 742.063867] env[62383]: DEBUG oslo.service.loopingcall [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 742.064171] env[62383]: DEBUG nova.compute.manager [-] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 742.064331] env[62383]: DEBUG nova.network.neutron [-] [instance: 0f48434f-859f-4910-883f-2f81be647bad] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.069490] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5848577c-1353-40e4-b872-37ea595cd6c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.103117] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed61896-fa78-4e23-a77b-9e32ca7058df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.114828] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3b515c-e2cf-4251-853d-712d08b24f93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.140170] env[62383]: DEBUG nova.compute.provider_tree [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 
1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 742.151688] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451522, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.286168] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451516, 'name': CloneVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.370143] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5266e0ad-cdba-a83f-40e7-5e1d9f11d836, 'name': SearchDatastore_Task, 'duration_secs': 0.017305} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.370491] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.370881] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 742.370983] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.371675] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.371675] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 742.371675] env[62383]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-9dfbebcb-6cb9-4783-98a6-472fae94cab2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.382659] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 742.383592] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 742.383852] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7855cd7-b201-4c36-9b48-cef0db137fc8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.391813] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 742.391813] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522e8433-2d7a-52d7-cf01-83535f79b4fc" [ 742.391813] env[62383]: _type = "Task" [ 742.391813] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.402242] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522e8433-2d7a-52d7-cf01-83535f79b4fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.659209] env[62383]: DEBUG oslo_vmware.api [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451522, 'name': PowerOnVM_Task, 'duration_secs': 0.542731} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.663882] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 742.663882] env[62383]: INFO nova.compute.manager [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Took 6.84 seconds to spawn the instance on the hypervisor. 
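Aside on the task pattern visible above: the spawn that finishes here for instance 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9 (ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven by oslo.vmware's task polling, which is what the recurring wait_for_task / _poll_task locators in these entries point at. A minimal sketch of that pattern follows, assuming a VMwareAPISession like the one created at service startup; the connection values and the VM moref id are placeholders, and the constructor keyword names are quoted from memory rather than from this deployment, so treat it as an illustration, not the driver's actual code path.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection values; in Nova these come from the [vmware]
    # section of nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference for an existing VM from a placeholder
    # vCenter id.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api() issues the SOAP call (here PowerOnVM_Task) and returns a
    # task moref; wait_for_task() polls it until it reaches 'success' or
    # 'error' -- the "progress is N%" / "completed successfully" entries in
    # this log come from that polling loop.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

The split matches the log's own locators: the "Invoking ... with opID=..." lines are emitted by oslo_vmware/service.py's request handler when the call is issued, and the progress/completion lines by oslo_vmware/api.py while the returned task is polled.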
[ 742.663882] env[62383]: DEBUG nova.compute.manager [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 742.663882] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0de38a-5b6b-4932-b7f1-d1ce3569f12d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.683563] env[62383]: ERROR nova.scheduler.client.report [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [req-1d499c2f-65a4-48e8-bbac-9084cfa3a16f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1d499c2f-65a4-48e8-bbac-9084cfa3a16f"}]} [ 742.706947] env[62383]: DEBUG nova.scheduler.client.report [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 742.723419] env[62383]: DEBUG nova.scheduler.client.report [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 742.723652] env[62383]: DEBUG nova.compute.provider_tree [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 742.738466] env[62383]: 
DEBUG nova.scheduler.client.report [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 742.760065] env[62383]: DEBUG nova.scheduler.client.report [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 742.786398] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451516, 'name': CloneVM_Task, 'duration_secs': 1.602719} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.789582] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Created linked-clone VM from snapshot [ 742.790814] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6e689b-d634-4770-86dd-9049c2dc6883 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.799864] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Uploading image f2d66fc2-9067-43f0-93a8-d1a70a4c9f97 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 742.817880] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 742.818221] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-027f156c-bfa6-40fb-b336-a04960f26842 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.830358] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 742.830358] env[62383]: value = "task-2451523" [ 742.830358] env[62383]: _type = "Task" [ 742.830358] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.853869] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451523, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.909304] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522e8433-2d7a-52d7-cf01-83535f79b4fc, 'name': SearchDatastore_Task, 'duration_secs': 0.0163} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.910507] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf9e05aa-9bee-46b6-901d-b09167d76b7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.917037] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 742.917037] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52551416-7b8d-f027-b309-09a630ce9be1" [ 742.917037] env[62383]: _type = "Task" [ 742.917037] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.929253] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52551416-7b8d-f027-b309-09a630ce9be1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.186974] env[62383]: INFO nova.compute.manager [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Took 45.87 seconds to build instance. [ 743.345028] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451523, 'name': Destroy_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.386426] env[62383]: DEBUG nova.compute.manager [req-5dd35eed-66c1-4a91-bfc3-98d9ecc9413b req-de8b84c7-4a6c-4e60-a24a-46a27050fe39 service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Received event network-vif-deleted-5773169e-f9fe-4180-8237-10b88641ce09 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 743.386426] env[62383]: INFO nova.compute.manager [req-5dd35eed-66c1-4a91-bfc3-98d9ecc9413b req-de8b84c7-4a6c-4e60-a24a-46a27050fe39 service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Neutron deleted interface 5773169e-f9fe-4180-8237-10b88641ce09; detaching it from the instance and deleting it from the info cache [ 743.386426] env[62383]: DEBUG nova.network.neutron [req-5dd35eed-66c1-4a91-bfc3-98d9ecc9413b req-de8b84c7-4a6c-4e60-a24a-46a27050fe39 service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.393843] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08abd606-ef43-40b3-90d0-0bfffe491699 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.402540] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d773e3d-8ef0-4c01-9e03-9f610e8f734c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.441381] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1841fef8-f65a-4dc6-b287-8e8e6c0e0588 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.450840] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52551416-7b8d-f027-b309-09a630ce9be1, 'name': SearchDatastore_Task, 'duration_secs': 0.016243} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.453700] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.454143] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 743.458018] env[62383]: DEBUG nova.network.neutron [-] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.458018] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab8463da-96f6-47d6-890f-47a86cc036c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.459467] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9253d713-e5e9-4ddd-9bb9-f11e9236a96c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.478026] env[62383]: DEBUG nova.compute.provider_tree [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 743.481188] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 743.481188] env[62383]: value = "task-2451524" [ 743.481188] env[62383]: _type = "Task" [ 743.481188] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.491739] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451524, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.686545] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ede798e8-9eb7-4355-9656-bbda38ca031e tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lock "3f508af0-68a2-4898-b9ae-d84cdb8a4cd9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 85.116s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.844746] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451523, 'name': Destroy_Task, 'duration_secs': 0.517078} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.845089] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Destroyed the VM [ 743.845371] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 743.845652] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9f28ec16-a438-4b69-b6e1-8440ff78e4fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.858434] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 743.858434] env[62383]: value = "task-2451525" [ 743.858434] env[62383]: _type = "Task" [ 743.858434] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.872714] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451525, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.890191] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a1d1a1f-79a6-4943-8eee-9a1eade9e8e2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.906246] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5401db48-822b-41db-9f32-610a76872318 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.955351] env[62383]: DEBUG nova.compute.manager [req-5dd35eed-66c1-4a91-bfc3-98d9ecc9413b req-de8b84c7-4a6c-4e60-a24a-46a27050fe39 service nova] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Detach interface failed, port_id=5773169e-f9fe-4180-8237-10b88641ce09, reason: Instance 0f48434f-859f-4910-883f-2f81be647bad could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 743.965041] env[62383]: INFO nova.compute.manager [-] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Took 1.90 seconds to deallocate network for instance. [ 744.002937] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451524, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.032304] env[62383]: DEBUG nova.scheduler.client.report [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 744.032304] env[62383]: DEBUG nova.compute.provider_tree [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 75 to 76 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 744.034093] env[62383]: DEBUG nova.compute.provider_tree [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 744.190244] env[62383]: DEBUG nova.compute.manager [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 744.371048] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451525, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.472302] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.498243] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451524, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.720912} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.498513] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 744.498770] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 744.499098] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-24aef362-d348-40ce-94ec-ba24335cfacf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.511023] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 744.511023] env[62383]: value = "task-2451526" [ 744.511023] env[62383]: _type = "Task" [ 744.511023] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.522592] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451526, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.538361] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.127s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.538948] env[62383]: DEBUG nova.compute.manager [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 744.544624] env[62383]: DEBUG oslo_concurrency.lockutils [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.884s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.547996] env[62383]: INFO nova.compute.claims [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.716251] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.772696] env[62383]: DEBUG nova.compute.manager [None req-8c863d45-2922-4221-bc07-1801612d1eaa tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 744.773660] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5ba85d-b4ea-495c-81c5-70d2bc64d33a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.874844] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451525, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.006277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquiring lock "3f508af0-68a2-4898-b9ae-d84cdb8a4cd9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.006654] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lock "3f508af0-68a2-4898-b9ae-d84cdb8a4cd9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.006883] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquiring lock "3f508af0-68a2-4898-b9ae-d84cdb8a4cd9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.007088] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lock "3f508af0-68a2-4898-b9ae-d84cdb8a4cd9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.007277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lock "3f508af0-68a2-4898-b9ae-d84cdb8a4cd9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.009606] env[62383]: INFO nova.compute.manager [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Terminating instance [ 745.022437] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451526, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094354} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.023433] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.024152] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31948ea7-6f61-4100-bd3a-865499f61cac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.050818] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.050818] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e10dd651-8936-4bbb-bf4a-39e11b061590 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.068672] env[62383]: DEBUG nova.compute.utils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 745.071062] env[62383]: DEBUG nova.compute.manager [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 745.071062] env[62383]: DEBUG nova.network.neutron [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 745.082758] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 745.082758] env[62383]: value = "task-2451527" [ 745.082758] env[62383]: _type = "Task" [ 745.082758] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.095441] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451527, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.155657] env[62383]: DEBUG nova.policy [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '671a6c1983c64c26b3ea501f171045d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5bed29fa2bc64a31b3324d7d0d01c61d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 745.288063] env[62383]: INFO nova.compute.manager [None req-8c863d45-2922-4221-bc07-1801612d1eaa tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] instance snapshotting [ 745.288870] env[62383]: DEBUG nova.objects.instance [None req-8c863d45-2922-4221-bc07-1801612d1eaa tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lazy-loading 'flavor' on Instance uuid 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 745.362812] env[62383]: DEBUG oslo_vmware.rw_handles [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb4c44-7bc8-8536-59aa-0a0e996a4aba/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 745.363837] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac28963-b7e0-4878-bdd0-f1563693de09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.373753] env[62383]: DEBUG oslo_vmware.rw_handles [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb4c44-7bc8-8536-59aa-0a0e996a4aba/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 745.374147] env[62383]: ERROR oslo_vmware.rw_handles [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb4c44-7bc8-8536-59aa-0a0e996a4aba/disk-0.vmdk due to incomplete transfer. [ 745.377156] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2edaec32-d808-467f-adf1-cbe2de42f0d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.378894] env[62383]: DEBUG oslo_vmware.api [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451525, 'name': RemoveSnapshot_Task, 'duration_secs': 1.067193} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.379510] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 745.390934] env[62383]: DEBUG oslo_vmware.rw_handles [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cb4c44-7bc8-8536-59aa-0a0e996a4aba/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 745.390934] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Uploaded image d9e28366-6c3c-4e44-8e74-212ed2c92baa to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 745.391931] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 745.392241] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b9842670-dc43-4601-abba-1c7126637e45 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.399531] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 745.399531] env[62383]: value = "task-2451528" [ 745.399531] env[62383]: _type = "Task" [ 745.399531] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.412834] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451528, 'name': Destroy_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.517591] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquiring lock "refresh_cache-3f508af0-68a2-4898-b9ae-d84cdb8a4cd9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.517792] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquired lock "refresh_cache-3f508af0-68a2-4898-b9ae-d84cdb8a4cd9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.517974] env[62383]: DEBUG nova.network.neutron [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.574818] env[62383]: DEBUG nova.compute.manager [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 745.597035] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451527, 'name': ReconfigVM_Task, 'duration_secs': 0.32168} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.602038] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.602635] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90834a36-329e-4ef8-aa8d-17c2bc45a195 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.612986] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 745.612986] env[62383]: value = "task-2451529" [ 745.612986] env[62383]: _type = "Task" [ 745.612986] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.625783] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451529, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.796883] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6a63e6-8f5f-49ac-ad65-0418196b1046 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.821896] env[62383]: DEBUG nova.network.neutron [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Successfully created port: e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.826626] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1edf315b-3149-45e1-a7d3-73a574473cd0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.885651] env[62383]: WARNING nova.compute.manager [None req-6b4bd84b-84e6-4249-97f9-d214299ef25c tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Image not found during snapshot: nova.exception.ImageNotFound: Image f2d66fc2-9067-43f0-93a8-d1a70a4c9f97 could not be found. [ 745.911990] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451528, 'name': Destroy_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.043660] env[62383]: DEBUG nova.network.neutron [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.124019] env[62383]: DEBUG nova.network.neutron [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.135078] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451529, 'name': Rename_Task, 'duration_secs': 0.164069} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.136320] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 746.136591] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-262304df-a6f4-4376-89b6-b775fde5d35e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.142999] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 746.142999] env[62383]: value = "task-2451530" [ 746.142999] env[62383]: _type = "Task" [ 746.142999] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.155025] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451530, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.204803] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f201861-8ca0-464d-aa7d-476c63dc7488 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.213673] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34001861-ddcb-4d29-b7dd-2a50e7650176 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.246014] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dec243f-4ec7-4b80-9620-937f3152b8b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.254334] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9312b213-92ae-41fa-81cd-1eeb7bc3bc33 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.268381] env[62383]: DEBUG nova.compute.provider_tree [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 746.338766] env[62383]: DEBUG nova.compute.manager [None req-8c863d45-2922-4221-bc07-1801612d1eaa tempest-ServersAaction247Test-1357269920 
tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Instance disappeared during snapshot {{(pid=62383) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 746.411754] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451528, 'name': Destroy_Task, 'duration_secs': 0.733369} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.412756] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Destroyed the VM [ 746.412756] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 746.412756] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c08fdd0b-9641-4264-9267-499b60726d59 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.421780] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 746.421780] env[62383]: value = "task-2451531" [ 746.421780] env[62383]: _type = "Task" [ 746.421780] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.429875] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451531, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.473181] env[62383]: DEBUG nova.compute.manager [None req-8c863d45-2922-4221-bc07-1801612d1eaa tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Found 0 images (rotation: 2) {{(pid=62383) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 746.587695] env[62383]: DEBUG nova.compute.manager [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 746.610399] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 746.610662] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 746.610823] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 746.611015] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 746.611172] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 746.611322] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 746.611530] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 746.611749] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 746.611846] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 746.612039] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 746.612217] env[62383]: DEBUG nova.virt.hardware [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 746.613101] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c022d9a3-b4f2-48fa-b9aa-6ae41f2a7768 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.621070] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d2e815-992e-41c7-a8b8-61b39fc9cb4a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.626701] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Releasing lock "refresh_cache-3f508af0-68a2-4898-b9ae-d84cdb8a4cd9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.627158] env[62383]: DEBUG nova.compute.manager [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 746.627358] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 746.636086] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c576df-5fd3-48b9-be65-13f7e8fdfbaf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.642285] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 746.642469] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48387271-d64d-4fbc-b151-2d3c516a92e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.651351] env[62383]: DEBUG oslo_vmware.api [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451530, 'name': PowerOnVM_Task, 'duration_secs': 0.436713} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.652548] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 746.652789] env[62383]: DEBUG nova.compute.manager [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 746.653268] env[62383]: DEBUG oslo_vmware.api [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for the task: (returnval){ [ 746.653268] env[62383]: value = "task-2451532" [ 746.653268] env[62383]: _type = "Task" [ 746.653268] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.653851] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b9fc38-5c45-41bf-9f86-6f7162cb5b0c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.668457] env[62383]: DEBUG oslo_vmware.api [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451532, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.679667] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "563840a8-8fa7-4bfa-9912-933c14e7076a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.679932] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "563840a8-8fa7-4bfa-9912-933c14e7076a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.680141] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "563840a8-8fa7-4bfa-9912-933c14e7076a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.680336] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "563840a8-8fa7-4bfa-9912-933c14e7076a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.680512] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "563840a8-8fa7-4bfa-9912-933c14e7076a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.682683] env[62383]: INFO nova.compute.manager [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Terminating instance [ 746.804298] env[62383]: DEBUG nova.scheduler.client.report [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 76 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:975}} [ 746.805505] env[62383]: DEBUG nova.compute.provider_tree [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 76 to 77 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 746.805505] env[62383]: DEBUG nova.compute.provider_tree [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 746.931924] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451531, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.177566] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.177566] env[62383]: DEBUG oslo_vmware.api [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451532, 'name': PowerOffVM_Task, 'duration_secs': 0.133372} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.177566] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 747.177566] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 747.177781] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1b245d3-8a16-4c88-a780-6e0025f7eff2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.186419] env[62383]: DEBUG nova.compute.manager [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 747.186635] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 747.187469] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59137d82-848a-4386-afc9-7348171f7d91 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.194799] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 747.195074] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51f47435-de9f-4f3a-9978-ae6b429698a2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.201667] env[62383]: DEBUG oslo_vmware.api [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 747.201667] env[62383]: value = "task-2451534" [ 747.201667] env[62383]: _type = "Task" [ 747.201667] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.211921] env[62383]: DEBUG oslo_vmware.api [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451534, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.214206] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 747.214500] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 747.214724] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Deleting the datastore file [datastore2] 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 747.215101] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-718834ec-31fc-4ec9-880e-3cf1499ab3d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.222221] env[62383]: DEBUG oslo_vmware.api [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for the task: (returnval){ [ 747.222221] env[62383]: value = "task-2451535" [ 747.222221] env[62383]: _type = "Task" [ 747.222221] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.230617] env[62383]: DEBUG oslo_vmware.api [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451535, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.309846] env[62383]: DEBUG oslo_concurrency.lockutils [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.310199] env[62383]: DEBUG nova.compute.manager [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 747.313859] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.882s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.313859] env[62383]: DEBUG nova.objects.instance [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lazy-loading 'resources' on Instance uuid 0c01a974-2318-461b-965f-ba4932e3bea1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 747.432669] env[62383]: DEBUG oslo_vmware.api [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451531, 'name': RemoveSnapshot_Task, 'duration_secs': 0.524091} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.432941] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 747.433377] env[62383]: INFO nova.compute.manager [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Took 15.93 seconds to snapshot the instance on the hypervisor. [ 747.711840] env[62383]: DEBUG oslo_vmware.api [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451534, 'name': PowerOffVM_Task, 'duration_secs': 0.184184} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.712091] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 747.712274] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 747.712527] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3e00e11-57bf-40ac-b051-e1062ef9e2b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.731088] env[62383]: DEBUG oslo_vmware.api [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Task: {'id': task-2451535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134156} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.731355] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 747.731541] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 747.731714] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 747.731889] env[62383]: INFO nova.compute.manager [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Took 1.10 seconds to destroy the instance on the hypervisor. [ 747.732144] env[62383]: DEBUG oslo.service.loopingcall [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 747.732339] env[62383]: DEBUG nova.compute.manager [-] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 747.732441] env[62383]: DEBUG nova.network.neutron [-] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 747.748529] env[62383]: DEBUG nova.network.neutron [-] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.778404] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 747.778660] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 747.778860] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Deleting the datastore file [datastore2] 563840a8-8fa7-4bfa-9912-933c14e7076a {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 747.779436] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b1e05c0-f0f7-42f2-b405-43a94d93f1a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.787702] env[62383]: DEBUG oslo_vmware.api [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 747.787702] env[62383]: value = "task-2451537" [ 747.787702] env[62383]: _type = "Task" [ 747.787702] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.795975] env[62383]: DEBUG oslo_vmware.api [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451537, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.817078] env[62383]: DEBUG nova.compute.utils [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 747.822192] env[62383]: DEBUG nova.compute.manager [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 747.822408] env[62383]: DEBUG nova.network.neutron [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 747.874379] env[62383]: DEBUG nova.policy [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd2d55a007844916aa9792a2e8c260fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a290905b3384dee8a2afd416c1e4f5c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 748.007261] env[62383]: DEBUG nova.compute.manager [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Found 3 images (rotation: 2) {{(pid=62383) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 748.007501] env[62383]: DEBUG nova.compute.manager [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Rotating out 1 backups {{(pid=62383) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 748.007646] env[62383]: DEBUG nova.compute.manager [None req-02240cd9-d3b5-4f98-a525-cb5dce2fd484 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Deleting image 6f1f859b-fe53-4112-82ab-d69109fbccbf {{(pid=62383) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 748.186709] env[62383]: DEBUG nova.network.neutron [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Successfully created port: 9a97ba80-acdf-4ecf-a553-e26b0d98c82f {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.255336] env[62383]: DEBUG nova.network.neutron [-] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Updating instance_info_cache with network_info: [] {{(pid=62383) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.298667] env[62383]: DEBUG oslo_vmware.api [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451537, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141978} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.299270] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 748.299270] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 748.299491] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 748.299645] env[62383]: INFO nova.compute.manager [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 748.299840] env[62383]: DEBUG oslo.service.loopingcall [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.300129] env[62383]: DEBUG nova.compute.manager [-] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 748.300129] env[62383]: DEBUG nova.network.neutron [-] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 748.306656] env[62383]: DEBUG nova.network.neutron [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Successfully updated port: e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 748.322792] env[62383]: DEBUG nova.compute.manager [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 748.471572] env[62383]: DEBUG nova.compute.manager [req-40d2caeb-f9d9-4ed1-8251-b9efc9b526a3 req-5dd35061-61a6-416d-9c0f-000be496b3be service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Received event network-vif-plugged-e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 748.471572] env[62383]: DEBUG oslo_concurrency.lockutils [req-40d2caeb-f9d9-4ed1-8251-b9efc9b526a3 req-5dd35061-61a6-416d-9c0f-000be496b3be service nova] Acquiring lock "1e367665-1d4b-4686-ac79-c946423c1762-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.471572] env[62383]: DEBUG oslo_concurrency.lockutils [req-40d2caeb-f9d9-4ed1-8251-b9efc9b526a3 req-5dd35061-61a6-416d-9c0f-000be496b3be service nova] Lock "1e367665-1d4b-4686-ac79-c946423c1762-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.472478] env[62383]: DEBUG oslo_concurrency.lockutils [req-40d2caeb-f9d9-4ed1-8251-b9efc9b526a3 req-5dd35061-61a6-416d-9c0f-000be496b3be service nova] Lock "1e367665-1d4b-4686-ac79-c946423c1762-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.472851] env[62383]: DEBUG nova.compute.manager [req-40d2caeb-f9d9-4ed1-8251-b9efc9b526a3 req-5dd35061-61a6-416d-9c0f-000be496b3be service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] No waiting events found dispatching network-vif-plugged-e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 748.473597] env[62383]: WARNING nova.compute.manager [req-40d2caeb-f9d9-4ed1-8251-b9efc9b526a3 req-5dd35061-61a6-416d-9c0f-000be496b3be service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Received unexpected event network-vif-plugged-e09c6085-476c-4c95-a6e0-1175a4786e4d for instance with vm_state building and task_state spawning. 
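Editor's note: the _rotate_backups entries above ("Found 3 images (rotation: 2)", "Rotating out 1 backups", "Deleting image 6f1f859b-...") reduce to simple arithmetic: keep the newest `rotation` backups and delete whatever is left over, oldest first. A minimal sketch of that logic, assuming a plain list of image records; the helper name and the record shape are illustrative, not Nova's actual implementation.

```python
from datetime import datetime

def rotate_backups(images, rotation):
    """Illustrative sketch: return the backups that fall outside the rotation window.

    `images` is assumed to be a list of dicts with 'id' and 'created_at' keys,
    a simplification of the Glance image records Nova actually works with.
    """
    # Sort newest first so the oldest backups fall off the end of the list.
    ordered = sorted(images, key=lambda img: img["created_at"], reverse=True)
    # e.g. 3 images with rotation 2 -> 1 image rotated out ("Rotating out 1 backups")
    return ordered[rotation:]

backups = [
    {"id": "6f1f859b-fe53-4112-82ab-d69109fbccbf", "created_at": datetime(2025, 2, 11, 15, 0)},
    {"id": "backup-2", "created_at": datetime(2025, 2, 11, 16, 0)},
    {"id": "backup-3", "created_at": datetime(2025, 2, 11, 17, 0)},
]
for image in rotate_backups(backups, rotation=2):
    print("would delete image", image["id"])  # the oldest backup is rotated out
```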
[ 748.526606] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-440f061d-da71-45ca-a437-dbaabbbed07f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.538956] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b92265d-39a7-4017-8431-91ddfd303baa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.571831] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c275ae-e6c8-43c3-8e44-383e3051a3ee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.579680] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423a6955-de1b-4ceb-af67-e71ed54742ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.599763] env[62383]: DEBUG nova.compute.provider_tree [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.760566] env[62383]: INFO nova.compute.manager [-] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Took 1.03 seconds to deallocate network for instance. [ 748.811214] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.811421] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.811614] env[62383]: DEBUG nova.network.neutron [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.829228] env[62383]: INFO nova.virt.block_device [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Booting with volume 884f4ed9-0cfe-43bd-8c26-6c9365c1b781 at /dev/sda [ 748.868689] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2c07285-12ed-4db8-b170-a3578fff93e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.879998] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffbcb6c-5cf3-47b2-add0-03e7a46d000f {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.897244] env[62383]: DEBUG nova.compute.manager [req-00bfd31e-4db9-46cf-9d73-90e08a58e4b1 req-e9ddd1fc-b54e-4c95-b1e2-fb3cf99e643c service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Received event network-vif-deleted-1ee3599c-2ed4-4e36-9e36-3446e6178380 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 748.897460] env[62383]: INFO nova.compute.manager [req-00bfd31e-4db9-46cf-9d73-90e08a58e4b1 req-e9ddd1fc-b54e-4c95-b1e2-fb3cf99e643c service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Neutron deleted interface 1ee3599c-2ed4-4e36-9e36-3446e6178380; detaching it from the instance and deleting it from the info cache [ 748.897638] env[62383]: DEBUG nova.network.neutron [req-00bfd31e-4db9-46cf-9d73-90e08a58e4b1 req-e9ddd1fc-b54e-4c95-b1e2-fb3cf99e643c service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.922582] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-463f56e1-ffd5-4655-a855-c8ad4981d945 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.930896] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77050762-841a-46c1-8cc5-e39801c646c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.967990] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66e57e5-399d-4bfa-83f6-56f9b9046461 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.974596] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc3f21d-1b24-4618-b482-cfce0be5e1cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.987612] env[62383]: DEBUG nova.virt.block_device [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Updating existing volume attachment record: b5b9db7e-8555-4582-a226-795c4dcf5696 {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 749.103109] env[62383]: DEBUG nova.scheduler.client.report [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 749.177864] env[62383]: DEBUG nova.network.neutron [-] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 749.230448] env[62383]: INFO nova.compute.manager [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Rebuilding instance [ 749.267758] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.288384] env[62383]: DEBUG nova.compute.manager [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 749.289368] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009d5562-ed22-45ba-8d5a-4f0c6ecb003f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.358811] env[62383]: DEBUG nova.network.neutron [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.400119] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6e816d5-dc4b-4b1b-a29f-f37c20ee6b6e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.414569] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1de51b0-54c0-4617-8b24-28a25ad89cd5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.452620] env[62383]: DEBUG nova.compute.manager [req-00bfd31e-4db9-46cf-9d73-90e08a58e4b1 req-e9ddd1fc-b54e-4c95-b1e2-fb3cf99e643c service nova] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Detach interface failed, port_id=1ee3599c-2ed4-4e36-9e36-3446e6178380, reason: Instance 563840a8-8fa7-4bfa-9912-933c14e7076a could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 749.557214] env[62383]: DEBUG nova.network.neutron [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updating instance_info_cache with network_info: [{"id": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "address": "fa:16:3e:24:0a:9f", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09c6085-47", "ovs_interfaceid": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.610032] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.296s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.613179] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.117s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.613179] env[62383]: DEBUG nova.objects.instance [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lazy-loading 'resources' on Instance uuid 6b5daa17-ad4a-4b30-a1fe-083a1a238667 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 749.634827] env[62383]: INFO nova.scheduler.client.report [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted allocations for instance 0c01a974-2318-461b-965f-ba4932e3bea1 [ 749.681031] env[62383]: INFO nova.compute.manager [-] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Took 1.38 seconds to deallocate network for instance. 
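Editor's note: the "Acquiring lock ... by ...", "acquired ... waited N s" and '"released" ... held N s' triplets throughout the trace come from oslo_concurrency.lockutils, which serializes access to shared state such as "compute_resources" (the resource tracker accounting) and the per-instance "refresh_cache-<uuid>" entries; the 33.117s wait logged above is simply time spent queued behind other holders of the same named lock. A minimal sketch of the same pattern using the real lockutils.lock context manager; the lock name and the empty critical section are placeholders.

```python
from oslo_concurrency import lockutils

def update_usage_example():
    # Same pattern as the resource tracker: anything that mutates the shared
    # accounting runs while "compute_resources" is held, which is why the log
    # shows long "waited" times when many concurrent requests queue up on it.
    with lockutils.lock("compute_resources"):
        pass  # placeholder for the critical section (update usage, etc.)

update_usage_example()
```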
[ 749.934866] env[62383]: DEBUG nova.network.neutron [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Successfully updated port: 9a97ba80-acdf-4ecf-a553-e26b0d98c82f {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 750.062110] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.062485] env[62383]: DEBUG nova.compute.manager [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Instance network_info: |[{"id": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "address": "fa:16:3e:24:0a:9f", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09c6085-47", "ovs_interfaceid": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 750.062987] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:0a:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e09c6085-476c-4c95-a6e0-1175a4786e4d', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.073100] env[62383]: DEBUG oslo.service.loopingcall [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.073392] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.073625] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d775e668-25ee-43a5-9250-f2ae27d8343d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.099887] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.099887] env[62383]: value = "task-2451538" [ 750.099887] env[62383]: _type = "Task" [ 750.099887] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.108965] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451538, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.150657] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c936438c-aef3-4882-9594-0e3872b0b5ed tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "0c01a974-2318-461b-965f-ba4932e3bea1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.066s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.191578] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.231742] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "8a165d96-f503-4bc5-bff4-e6a85201e137" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.232226] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.232450] env[62383]: DEBUG nova.compute.manager [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 750.233863] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d2f0b4-3911-41df-8b30-7df6ac937d8d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.245496] env[62383]: DEBUG nova.compute.manager 
[None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 750.246082] env[62383]: DEBUG nova.objects.instance [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'flavor' on Instance uuid 8a165d96-f503-4bc5-bff4-e6a85201e137 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 750.309157] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 750.310183] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4dfab294-66fd-43c5-9e09-2aaf59cc1787 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.319082] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 750.319082] env[62383]: value = "task-2451539" [ 750.319082] env[62383]: _type = "Task" [ 750.319082] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.333086] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451539, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.437225] env[62383]: DEBUG oslo_concurrency.lockutils [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Acquiring lock "refresh_cache-d0311c29-e1ed-446f-a52b-1687b9561740" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.437377] env[62383]: DEBUG oslo_concurrency.lockutils [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Acquired lock "refresh_cache-d0311c29-e1ed-446f-a52b-1687b9561740" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.437526] env[62383]: DEBUG nova.network.neutron [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.507480] env[62383]: DEBUG nova.compute.manager [req-a3dccac9-3134-446d-b66e-fdfe90a205b7 req-f5fd31f2-e62b-4cc7-8892-04eede4f241a service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Received event network-changed-e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 750.507823] env[62383]: DEBUG nova.compute.manager [req-a3dccac9-3134-446d-b66e-fdfe90a205b7 req-f5fd31f2-e62b-4cc7-8892-04eede4f241a service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Refreshing instance network info cache due to event network-changed-e09c6085-476c-4c95-a6e0-1175a4786e4d. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 750.508055] env[62383]: DEBUG oslo_concurrency.lockutils [req-a3dccac9-3134-446d-b66e-fdfe90a205b7 req-f5fd31f2-e62b-4cc7-8892-04eede4f241a service nova] Acquiring lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.508332] env[62383]: DEBUG oslo_concurrency.lockutils [req-a3dccac9-3134-446d-b66e-fdfe90a205b7 req-f5fd31f2-e62b-4cc7-8892-04eede4f241a service nova] Acquired lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.508555] env[62383]: DEBUG nova.network.neutron [req-a3dccac9-3134-446d-b66e-fdfe90a205b7 req-f5fd31f2-e62b-4cc7-8892-04eede4f241a service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Refreshing network info cache for port e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 750.612679] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451538, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.664085] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35e7c59-cd5d-4837-ae26-a0ef907f876d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.671955] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae422d04-009f-4cc7-b034-26561bde3c65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.703172] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7feef90-6cb0-4a3b-a914-939f7ed77495 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.713864] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e94b79-b76f-4607-b2b9-2fe72ee2d572 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.729086] env[62383]: DEBUG nova.compute.provider_tree [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.828936] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451539, 'name': PowerOffVM_Task, 'duration_secs': 0.250824} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.829225] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 750.829456] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.830311] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e226ea9e-385a-45d7-ac2c-82e5d2d6ba17 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.836890] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 750.837312] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31d11420-a5d1-4d8a-bcac-05583d7ef474 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.895023] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 750.895261] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 750.895261] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleting the datastore file [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.895261] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d81a5b2e-b7ff-4e6f-a3d7-fc06101a71e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.903510] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 750.903510] env[62383]: value = "task-2451541" [ 750.903510] env[62383]: _type = "Task" [ 750.903510] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.911475] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451541, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.972951] env[62383]: DEBUG nova.network.neutron [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.990686] env[62383]: DEBUG nova.compute.manager [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Received event network-vif-plugged-9a97ba80-acdf-4ecf-a553-e26b0d98c82f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 750.990980] env[62383]: DEBUG oslo_concurrency.lockutils [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] Acquiring lock "d0311c29-e1ed-446f-a52b-1687b9561740-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.991142] env[62383]: DEBUG oslo_concurrency.lockutils [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] Lock "d0311c29-e1ed-446f-a52b-1687b9561740-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.991355] env[62383]: DEBUG oslo_concurrency.lockutils [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] Lock "d0311c29-e1ed-446f-a52b-1687b9561740-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.991482] env[62383]: DEBUG nova.compute.manager [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] No waiting events found dispatching network-vif-plugged-9a97ba80-acdf-4ecf-a553-e26b0d98c82f {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 750.991651] env[62383]: WARNING nova.compute.manager [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Received unexpected event network-vif-plugged-9a97ba80-acdf-4ecf-a553-e26b0d98c82f for instance with vm_state building and task_state spawning. 
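Editor's note: the 'Waiting for the task: (returnval){ value = "task-..." }' blocks and the "progress is 0% / 99% ... completed successfully" updates come from oslo.vmware's task polling: the driver starts a vCenter task (CreateVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, ...) and a looping call polls its state until it succeeds or raises. Below is a simplified, self-contained sketch of that loop; `get_task_info` is an assumed stand-in for the property-collector read done by the real `oslo_vmware.api` code (`wait_for_task` / `_poll_task`), not the library's API.

```python
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, interval=0.5):
    """Simplified stand-in for the oslo.vmware task-polling loop.

    `get_task_info` is an assumed callable returning an object with
    `state` ('running', 'success', 'error') and `progress` attributes.
    """
    while True:
        info = get_task_info()
        if info.state == "success":
            return info                      # "completed successfully."
        if info.state == "error":
            raise TaskFailed(getattr(info, "error", "task error"))
        # Otherwise keep polling, like the "progress is 0%/99%" lines above.
        time.sleep(interval)

# Tiny demo with a fake task that succeeds on the third poll.
class _FakeInfo:
    def __init__(self, state, progress=0):
        self.state, self.progress = state, progress

_states = iter([_FakeInfo("running", 0), _FakeInfo("running", 99), _FakeInfo("success", 100)])
print(wait_for_task(lambda: next(_states), interval=0).state)  # -> success
```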
[ 750.991861] env[62383]: DEBUG nova.compute.manager [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Received event network-changed-9a97ba80-acdf-4ecf-a553-e26b0d98c82f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 750.992287] env[62383]: DEBUG nova.compute.manager [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Refreshing instance network info cache due to event network-changed-9a97ba80-acdf-4ecf-a553-e26b0d98c82f. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 750.992287] env[62383]: DEBUG oslo_concurrency.lockutils [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] Acquiring lock "refresh_cache-d0311c29-e1ed-446f-a52b-1687b9561740" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.100894] env[62383]: DEBUG nova.compute.manager [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 751.101500] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 751.101705] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.101858] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.102051] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.102198] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 751.102345] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 751.102550] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 751.102712] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 751.102877] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 751.103057] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 751.103239] env[62383]: DEBUG nova.virt.hardware [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 751.104164] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f36654-6464-4707-9d21-99af42482d93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.120311] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451538, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.121552] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63331210-9b94-40c2-9f61-12166ab31060 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.165403] env[62383]: DEBUG nova.network.neutron [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Updating instance_info_cache with network_info: [{"id": "9a97ba80-acdf-4ecf-a553-e26b0d98c82f", "address": "fa:16:3e:f5:47:38", "network": {"id": "53e3a6ea-2b6a-4dc4-ba7e-d6985ab51dbc", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-243091242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a290905b3384dee8a2afd416c1e4f5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a97ba80-ac", "ovs_interfaceid": "9a97ba80-acdf-4ecf-a553-e26b0d98c82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.233980] env[62383]: DEBUG nova.scheduler.client.report [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.252975] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 751.253345] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64b3d3fc-bba8-4af2-9231-b78757efbe94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.261215] env[62383]: DEBUG oslo_vmware.api [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the 
task: (returnval){ [ 751.261215] env[62383]: value = "task-2451542" [ 751.261215] env[62383]: _type = "Task" [ 751.261215] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.272519] env[62383]: DEBUG oslo_vmware.api [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451542, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.296714] env[62383]: DEBUG nova.network.neutron [req-a3dccac9-3134-446d-b66e-fdfe90a205b7 req-f5fd31f2-e62b-4cc7-8892-04eede4f241a service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updated VIF entry in instance network info cache for port e09c6085-476c-4c95-a6e0-1175a4786e4d. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 751.297653] env[62383]: DEBUG nova.network.neutron [req-a3dccac9-3134-446d-b66e-fdfe90a205b7 req-f5fd31f2-e62b-4cc7-8892-04eede4f241a service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updating instance_info_cache with network_info: [{"id": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "address": "fa:16:3e:24:0a:9f", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09c6085-47", "ovs_interfaceid": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.414551] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451541, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126308} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.414551] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 751.414551] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 751.414551] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.612703] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451538, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.668571] env[62383]: DEBUG oslo_concurrency.lockutils [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Releasing lock "refresh_cache-d0311c29-e1ed-446f-a52b-1687b9561740" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.668990] env[62383]: DEBUG nova.compute.manager [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Instance network_info: |[{"id": "9a97ba80-acdf-4ecf-a553-e26b0d98c82f", "address": "fa:16:3e:f5:47:38", "network": {"id": "53e3a6ea-2b6a-4dc4-ba7e-d6985ab51dbc", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-243091242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a290905b3384dee8a2afd416c1e4f5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a97ba80-ac", "ovs_interfaceid": "9a97ba80-acdf-4ecf-a553-e26b0d98c82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 751.669326] env[62383]: DEBUG oslo_concurrency.lockutils [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] Acquired lock "refresh_cache-d0311c29-e1ed-446f-a52b-1687b9561740" {{(pid=62383) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.669512] env[62383]: DEBUG nova.network.neutron [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Refreshing network info cache for port 9a97ba80-acdf-4ecf-a553-e26b0d98c82f {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.670788] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:47:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4223acd2-30f7-440e-b975-60b30d931694', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a97ba80-acdf-4ecf-a553-e26b0d98c82f', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.678138] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Creating folder: Project (1a290905b3384dee8a2afd416c1e4f5c). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.679300] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12b857fd-80bf-47b0-a21a-448e94174348 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.692504] env[62383]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 751.692504] env[62383]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62383) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 751.692760] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Folder already exists: Project (1a290905b3384dee8a2afd416c1e4f5c). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 751.692964] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Creating folder: Instances. Parent ref: group-v496400. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.693699] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a9e550a-652d-4d7d-8e3c-86a9edc08bba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.702362] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Created folder: Instances in parent group-v496400. 
[ 751.702598] env[62383]: DEBUG oslo.service.loopingcall [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.702784] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.702985] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7e13aa2-8e72-4a3e-b076-aae25cfbfce8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.720923] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.720923] env[62383]: value = "task-2451545" [ 751.720923] env[62383]: _type = "Task" [ 751.720923] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.728134] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451545, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.737985] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.125s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.740546] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.796s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.740675] env[62383]: DEBUG nova.objects.instance [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lazy-loading 'resources' on Instance uuid 872ac212-9f29-426d-94c7-e1bf73aebd94 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 751.766886] env[62383]: INFO nova.scheduler.client.report [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Deleted allocations for instance 6b5daa17-ad4a-4b30-a1fe-083a1a238667 [ 751.777559] env[62383]: DEBUG oslo_vmware.api [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451542, 'name': PowerOffVM_Task, 'duration_secs': 0.185639} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.777994] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 751.778342] env[62383]: DEBUG nova.compute.manager [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 751.779617] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5522eb4-f2c1-4984-b5b4-04a28ddb52ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.800799] env[62383]: DEBUG oslo_concurrency.lockutils [req-a3dccac9-3134-446d-b66e-fdfe90a205b7 req-f5fd31f2-e62b-4cc7-8892-04eede4f241a service nova] Releasing lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.113929] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451538, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.230312] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451545, 'name': CreateVM_Task, 'duration_secs': 0.297387} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.230618] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 752.234100] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': 'b5b9db7e-8555-4582-a226-795c4dcf5696', 'device_type': None, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496409', 'volume_id': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'name': 'volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd0311c29-e1ed-446f-a52b-1687b9561740', 'attached_at': '', 'detached_at': '', 'volume_id': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'serial': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781'}, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62383) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 752.234100] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: 
d0311c29-e1ed-446f-a52b-1687b9561740] Root volume attach. Driver type: vmdk {{(pid=62383) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 752.234100] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a6aa9c-bac2-4ab5-b2bc-fe942eaeb4d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.240162] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c70c07f-1d76-4bee-b3af-36ff6b0d08b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.250379] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb201fde-a7c9-48c8-8eaf-4703af86efc1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.256103] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-d7909854-5d4f-4b5c-9ae0-6764fd81a786 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.264906] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for the task: (returnval){ [ 752.264906] env[62383]: value = "task-2451546" [ 752.264906] env[62383]: _type = "Task" [ 752.264906] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.281561] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451546, 'name': RelocateVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.282307] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0f3bdb6c-62c2-4901-a541-39b18e686d06 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "6b5daa17-ad4a-4b30-a1fe-083a1a238667" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.544s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.292199] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f97188a9-c4ee-42f1-89a3-b263cbdab9f8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.060s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.442105] env[62383]: DEBUG nova.network.neutron [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Updated VIF entry in instance network info cache for port 9a97ba80-acdf-4ecf-a553-e26b0d98c82f. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.442648] env[62383]: DEBUG nova.network.neutron [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Updating instance_info_cache with network_info: [{"id": "9a97ba80-acdf-4ecf-a553-e26b0d98c82f", "address": "fa:16:3e:f5:47:38", "network": {"id": "53e3a6ea-2b6a-4dc4-ba7e-d6985ab51dbc", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-243091242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a290905b3384dee8a2afd416c1e4f5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a97ba80-ac", "ovs_interfaceid": "9a97ba80-acdf-4ecf-a553-e26b0d98c82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.453019] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 752.453019] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 752.453019] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 752.453237] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 752.453237] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 752.453237] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 752.453439] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 752.453739] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 752.453739] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 752.453905] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 752.454363] env[62383]: DEBUG nova.virt.hardware [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 752.455404] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9584d9-1804-4f39-9041-2d7e23b063d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.468151] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfa37fa-9be4-4e22-a1ce-5828bfa17b11 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.490362] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:cf:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1d30299-f4ce-40b4-9046-fd1d10565fd3', 'vif_model': 
'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 752.500076] env[62383]: DEBUG oslo.service.loopingcall [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 752.503547] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 752.503949] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e87e58bc-3dfd-47cb-a1c0-139c4910d051 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.529080] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 752.529080] env[62383]: value = "task-2451547" [ 752.529080] env[62383]: _type = "Task" [ 752.529080] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.537724] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451547, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.618041] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451538, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.786199] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451546, 'name': RelocateVM_Task, 'duration_secs': 0.396096} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.786532] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Volume attach. 
Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 752.790033] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496409', 'volume_id': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'name': 'volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd0311c29-e1ed-446f-a52b-1687b9561740', 'attached_at': '', 'detached_at': '', 'volume_id': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'serial': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 752.790033] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e3e854-f70a-44d6-ad9f-a99238deda54 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.813958] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323834f5-d3fc-4b25-bbed-539088b5b82a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.839271] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781/volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.840675] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fca86982-3346-47eb-b2fd-0026a3a82872 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.857362] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc8d89f-f3d0-4b08-b8d2-99cdad7d81a2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.865369] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb09341-58b5-4ddc-ba87-15f82a24ab8b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.868172] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for the task: (returnval){ [ 752.868172] env[62383]: value = "task-2451548" [ 752.868172] env[62383]: _type = "Task" [ 752.868172] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.898915] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e4a3d9-9fec-4514-9112-a637c5db12fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.905124] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451548, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.910431] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068cb331-70d2-4815-8fa8-f56c2000438f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.925334] env[62383]: DEBUG nova.compute.provider_tree [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 752.946805] env[62383]: DEBUG oslo_concurrency.lockutils [req-e01f6cc8-446c-46a5-9c7f-3ac1d55d50b1 req-fc9562c5-b7af-4fbd-a3b6-77061bed36af service nova] Releasing lock "refresh_cache-d0311c29-e1ed-446f-a52b-1687b9561740" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.040154] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451547, 'name': CreateVM_Task, 'duration_secs': 0.325401} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.040338] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 753.041044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.041197] env[62383]: DEBUG oslo_concurrency.lockutils [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.041517] env[62383]: DEBUG oslo_concurrency.lockutils [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 753.041779] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f35903c5-9b8d-42c7-8295-f7b95760a1b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.051672] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 753.051672] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5227f60b-4dda-8e7f-3f4d-bfd0aff9a863" [ 753.051672] env[62383]: _type = "Task" [ 753.051672] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.063170] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5227f60b-4dda-8e7f-3f4d-bfd0aff9a863, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.115832] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451538, 'name': CreateVM_Task, 'duration_secs': 2.921713} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.116031] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 753.116763] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.116942] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.117281] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 753.117596] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00ca017a-3858-44c1-9b00-9c05d4da9e1e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.122410] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 753.122410] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e1d4bd-a060-9c45-8dd9-fd7d2f6c5516" [ 753.122410] env[62383]: _type = "Task" [ 753.122410] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.132537] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e1d4bd-a060-9c45-8dd9-fd7d2f6c5516, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.377751] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451548, 'name': ReconfigVM_Task, 'duration_secs': 0.310592} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.378102] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Reconfigured VM instance instance-00000034 to attach disk [datastore2] volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781/volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 753.383032] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-560d5fcf-b4ec-431c-abcb-35f0f196fade {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.398045] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for the task: (returnval){ [ 753.398045] env[62383]: value = "task-2451549" [ 753.398045] env[62383]: _type = "Task" [ 753.398045] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.408225] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451549, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.452349] env[62383]: ERROR nova.scheduler.client.report [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] [req-2ff5413c-2a7a-4622-85b8-b2580f00d307] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2ff5413c-2a7a-4622-85b8-b2580f00d307"}]} [ 753.473337] env[62383]: DEBUG nova.scheduler.client.report [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 753.491165] env[62383]: DEBUG nova.scheduler.client.report [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 753.491572] env[62383]: DEBUG nova.compute.provider_tree [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 753.508027] env[62383]: DEBUG nova.scheduler.client.report [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 753.526618] env[62383]: DEBUG nova.scheduler.client.report [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 753.561932] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5227f60b-4dda-8e7f-3f4d-bfd0aff9a863, 'name': SearchDatastore_Task, 'duration_secs': 0.012044} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.561932] env[62383]: DEBUG oslo_concurrency.lockutils [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.562133] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 753.562263] env[62383]: DEBUG oslo_concurrency.lockutils [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.563219] env[62383]: DEBUG oslo_concurrency.lockutils [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.563219] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.563219] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e76d188f-6f2b-4e39-948c-351085255f91 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.579188] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.579375] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 753.582343] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad90c99e-c3b4-4c00-9266-85c94b591f8c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.589056] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 753.589056] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52689b4d-3f4b-6496-0be6-579fb7eb14c3" [ 753.589056] env[62383]: _type = "Task" [ 753.589056] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.594940] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52689b4d-3f4b-6496-0be6-579fb7eb14c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.633987] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e1d4bd-a060-9c45-8dd9-fd7d2f6c5516, 'name': SearchDatastore_Task, 'duration_secs': 0.010394} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.634835] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.635072] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 753.635303] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.635655] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
753.635655] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.635865] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-feefdc90-8e22-410f-8804-997d6c8189f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.650960] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.651151] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 753.651851] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0df17037-0bb9-4291-918d-6e52d983838b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.656650] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 753.656650] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524ec056-985c-5a6e-616c-79ec1542e69c" [ 753.656650] env[62383]: _type = "Task" [ 753.656650] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.666645] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524ec056-985c-5a6e-616c-79ec1542e69c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.784277] env[62383]: DEBUG nova.compute.manager [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Stashing vm_state: stopped {{(pid=62383) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 753.907815] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451549, 'name': ReconfigVM_Task, 'duration_secs': 0.12647} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.908131] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496409', 'volume_id': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'name': 'volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd0311c29-e1ed-446f-a52b-1687b9561740', 'attached_at': '', 'detached_at': '', 'volume_id': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'serial': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 753.908898] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f1e5737-0182-4e17-9a6e-83902ff6151b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.915332] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for the task: (returnval){ [ 753.915332] env[62383]: value = "task-2451550" [ 753.915332] env[62383]: _type = "Task" [ 753.915332] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.923106] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451550, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.033343] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f8898e-2c1b-4ca8-b282-c4ccccb51542 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.040767] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbd7a45-f657-48cc-a300-a5f941312c68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.073687] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed3db8a-2c88-4824-8fc0-91dff242201f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.081345] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829a7996-b0dc-4897-a41f-9ee3e016fe76 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.095050] env[62383]: DEBUG nova.compute.provider_tree [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 754.104835] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52689b4d-3f4b-6496-0be6-579fb7eb14c3, 'name': SearchDatastore_Task, 'duration_secs': 0.03168} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.106198] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7538aebd-da3d-4653-b4d5-370db1376c21 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.117357] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 754.117357] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520ef9bf-f6bf-cd58-ed50-ce5522ed3a4e" [ 754.117357] env[62383]: _type = "Task" [ 754.117357] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.125682] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520ef9bf-f6bf-cd58-ed50-ce5522ed3a4e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.165737] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524ec056-985c-5a6e-616c-79ec1542e69c, 'name': SearchDatastore_Task, 'duration_secs': 0.023011} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.167770] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa110cab-f3b7-472e-b79e-2dc0f8ac2cd6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.172026] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 754.172026] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]523e261d-839b-8f0f-9f2d-8a67f66bb118" [ 754.172026] env[62383]: _type = "Task" [ 754.172026] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.179633] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523e261d-839b-8f0f-9f2d-8a67f66bb118, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.272461] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 754.272849] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 754.321541] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.429062] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451550, 'name': Rename_Task, 'duration_secs': 0.134815} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.429062] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 754.429062] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e400b40b-cba1-4e77-b1f5-9a60bbcac87f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.433187] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for the task: (returnval){ [ 754.433187] env[62383]: value = "task-2451551" [ 754.433187] env[62383]: _type = "Task" [ 754.433187] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.443063] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451551, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.627346] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520ef9bf-f6bf-cd58-ed50-ce5522ed3a4e, 'name': SearchDatastore_Task, 'duration_secs': 0.036616} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.627633] env[62383]: DEBUG oslo_concurrency.lockutils [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.627888] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 754.628182] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02f9f0c3-8fc2-4869-9a57-542f5efdedd9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.636262] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 754.636262] env[62383]: value = "task-2451552" [ 754.636262] env[62383]: _type = "Task" [ 754.636262] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.637560] env[62383]: DEBUG nova.scheduler.client.report [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 79 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 754.637994] env[62383]: DEBUG nova.compute.provider_tree [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 79 to 80 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 754.638151] env[62383]: DEBUG nova.compute.provider_tree [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 754.650607] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451552, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.684025] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523e261d-839b-8f0f-9f2d-8a67f66bb118, 'name': SearchDatastore_Task, 'duration_secs': 0.030588} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.684351] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.684648] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 1e367665-1d4b-4686-ac79-c946423c1762/1e367665-1d4b-4686-ac79-c946423c1762.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 754.684931] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab6e5fbb-6a0e-4661-805c-5e0edfa07f48 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.692249] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 754.692249] env[62383]: value = "task-2451553" [ 754.692249] env[62383]: _type = "Task" [ 754.692249] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.700960] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451553, 'name': CopyVirtualDisk_Task} progress is 0%. 
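Every inventory dictionary in the ProviderTree and Placement entries above uses the same per-resource-class shape (total, reserved, min_unit, max_unit, step_size, allocation_ratio), guarded by a resource-provider generation that must match for the update to land. As a rough illustration, not the report client's actual code, this is how such a payload could be sent with the documented PUT /resource_providers/{uuid}/inventories call; the endpoint URL and token are placeholders:

# Illustrative only: the inventory payload shape from the log and how a client
# might PUT it to the Placement API. URL and token are placeholders.
import requests

provider_uuid = '60615f54-0557-436e-a486-87505bffb4c7'
payload = {
    # Placement rejects the update if this generation is stale (concurrency guard).
    'resource_provider_generation': 79,
    'inventories': {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145,
                    'step_size': 1, 'allocation_ratio': 1.0},
    },
}
resp = requests.put(
    f'http://placement.example.org/resource_providers/{provider_uuid}/inventories',
    json=payload,
    headers={'X-Auth-Token': 'PLACEHOLDER',
             'OpenStack-API-Version': 'placement 1.26'})
resp.raise_for_status()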
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.785824] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 754.786026] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 754.945444] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451551, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.149704] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.409s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.156439] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.114s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.156439] env[62383]: INFO nova.compute.claims [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.168255] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451552, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.199427] env[62383]: INFO nova.scheduler.client.report [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Deleted allocations for instance 872ac212-9f29-426d-94c7-e1bf73aebd94 [ 755.205625] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451553, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.348734] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.348734] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquired lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.348734] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Forcefully refreshing network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 755.442576] env[62383]: DEBUG oslo_vmware.api [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451551, 'name': PowerOnVM_Task, 'duration_secs': 0.614005} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.442897] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 755.443055] env[62383]: INFO nova.compute.manager [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Took 4.34 seconds to spawn the instance on the hypervisor. [ 755.443252] env[62383]: DEBUG nova.compute.manager [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 755.444532] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c5f83a-3705-4394-a6e8-6ce9acdc0c0b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.660115] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451552, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.635573} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.663133] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 755.663133] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 755.671867] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51e5ee92-da5a-4dd3-adf6-d4dfaccf9f80 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.676361] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 755.676361] env[62383]: value = "task-2451554" [ 755.676361] env[62383]: _type = "Task" [ 755.676361] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.682213] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451554, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.704451] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451553, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.705662} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.704720] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 1e367665-1d4b-4686-ac79-c946423c1762/1e367665-1d4b-4686-ac79-c946423c1762.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 755.704939] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 755.705230] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-192d636b-eb57-476f-871f-2799b1c6b9ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.715538] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fbfc463-8351-4319-a5c6-0c29d22a3775 tempest-ListServersNegativeTestJSON-841074668 tempest-ListServersNegativeTestJSON-841074668-project-member] Lock "872ac212-9f29-426d-94c7-e1bf73aebd94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.388s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.721366] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 755.721366] env[62383]: value = "task-2451555" [ 755.721366] env[62383]: _type = "Task" [ 755.721366] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.729544] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451555, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.965702] env[62383]: INFO nova.compute.manager [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Took 43.33 seconds to build instance. [ 756.186825] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451554, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083926} completed successfully. 
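Both instances above (1a740010-... on datastore2 and 1e367665-... on datastore1) run the same spawn-from-cache sequence: copy the cached image VMDK into the instance directory with CopyVirtualDisk_Task, then grow the copy to the flavor's root size with ExtendVirtualDisk_Task, where 1048576 is expressed in KB (a 1 GiB root disk). A condensed sketch of that sequence, assuming an oslo_vmware.api.VMwareAPISession and a Datacenter moref obtained elsewhere; the datastore paths are the ones from the log:

# Sketch of the copy-then-extend spawn sequence shown above; not Nova's actual
# code. `session` is assumed to be an oslo_vmware.api.VMwareAPISession and
# `dc_ref` a Datacenter moref looked up elsewhere.
def copy_and_extend_root_disk(session, dc_ref,
                              new_capacity_kb=1048576):  # 1048576 KB == 1 GiB
    disk_mgr = session.vim.service_content.virtualDiskManager
    src = ('[datastore2] devstack-image-cache_base/'
           'cac3b430-a1d5-4ad1-92ec-34c2261779a8/'
           'cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk')
    dst = ('[datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/'
           '1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk')

    # 1. CopyVirtualDisk_Task: clone the cached image disk into the instance dir.
    copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                   sourceName=src, sourceDatacenter=dc_ref,
                                   destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # 2. ExtendVirtualDisk_Task: grow the copy to the flavor's root size;
    #    newCapacityKb is in kilobytes, matching "Extending root virtual disk
    #    to 1048576" in the log.
    extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                     disk_mgr, name=dst, datacenter=dc_ref,
                                     newCapacityKb=new_capacity_kb,
                                     eagerZero=False)
    session.wait_for_task(extend_task)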
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.186825] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 756.186825] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c2b6dc-940a-476d-bee1-5c53136a65f5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.207109] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 756.214964] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fdaf8dd-defc-41ca-9e5e-4b6996a007d9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.240023] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451555, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.213284} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.242192] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 756.243037] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 756.243037] env[62383]: value = "task-2451556" [ 756.243037] env[62383]: _type = "Task" [ 756.243037] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.243774] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0df5dd-1d91-4d75-b8e5-87b4fbd61453 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.275053] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 1e367665-1d4b-4686-ac79-c946423c1762/1e367665-1d4b-4686-ac79-c946423c1762.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 756.281513] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb063322-77d3-410b-9dc7-8fb20f772603 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.299738] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.306489] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 756.306489] env[62383]: value = "task-2451557" [ 756.306489] env[62383]: _type = "Task" [ 756.306489] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.315451] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451557, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.466627] env[62383]: DEBUG oslo_concurrency.lockutils [None req-adcd091d-dda8-4363-895b-2bf94b5b6899 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lock "d0311c29-e1ed-446f-a52b-1687b9561740" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.891s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.736889] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance_info_cache with network_info: [{"id": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "address": "fa:16:3e:21:59:e3", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5205d6ef-09", "ovs_interfaceid": "5205d6ef-091d-4460-bd6c-3b1c5873c3ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.756214] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451556, 'name': ReconfigVM_Task, 'duration_secs': 0.301874} completed successfully. 
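The instance_info_cache entry refreshed above is a list of VIF dictionaries, each carrying the port id, MAC address, bridge, subnet/IP details and binding metadata. A tiny helper, for illustration only, that walks an entry of that shape and summarizes the addresses:

# Tiny helper, for illustration only: walks a network_info cache entry shaped
# like the one logged above and collects fixed/floating addresses per VIF.
def summarize_network_info(network_info):
    summary = []
    for vif in network_info:
        fixed, floating = [], []
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                fixed.append(ip['address'])
                floating.extend(f['address'] for f in ip.get('floating_ips', []))
        summary.append({'port_id': vif['id'],
                        'mac': vif['address'],
                        'bridge': vif['network'].get('bridge'),
                        'fixed_ips': fixed,
                        'floating_ips': floating})
    return summary

# For the cache entry above this would yield roughly:
# [{'port_id': '5205d6ef-091d-4460-bd6c-3b1c5873c3ea', 'mac': 'fa:16:3e:21:59:e3',
#   'bridge': 'br-int', 'fixed_ips': ['192.168.233.29'], 'floating_ips': []}]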
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.756492] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f/1a740010-ddd0-4df6-8ae6-02f1ed50137f.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.757133] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17faf0ef-cbe3-4a1e-96b1-bbde35532962 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.767814] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 756.767814] env[62383]: value = "task-2451558" [ 756.767814] env[62383]: _type = "Task" [ 756.767814] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.777653] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451558, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.800419] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dda6e74-7f1e-47a6-910a-9484e15998cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.807220] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2fb187-21e3-4ec9-b294-9f564e7bf019 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.818404] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451557, 'name': ReconfigVM_Task, 'duration_secs': 0.33961} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.844077] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 1e367665-1d4b-4686-ac79-c946423c1762/1e367665-1d4b-4686-ac79-c946423c1762.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.845504] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9e79a08-43db-4321-add5-c69a0201f2ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.847717] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17ef244-e3aa-483a-9e11-187b86f81d3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.855968] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b08adf6-8db6-42a1-91c9-a47c9d16e471 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.859823] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 756.859823] env[62383]: value = "task-2451559" [ 756.859823] env[62383]: _type = "Task" [ 756.859823] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.871586] env[62383]: DEBUG nova.compute.provider_tree [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.878397] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451559, 'name': Rename_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.242683] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Releasing lock "refresh_cache-a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.242683] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updated the network info_cache for instance {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 757.242683] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.242683] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.242683] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.242683] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.242993] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.242993] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.242993] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 757.243274] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.277811] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451558, 'name': Rename_Task, 'duration_secs': 0.141181} completed successfully. 
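The burst of "Running periodic task ComputeManager._*" lines above is oslo.service's periodic-task machinery iterating over every method the manager has registered as a periodic task; _reclaim_queued_deletes returns immediately because CONF.reclaim_instance_interval is not positive. A minimal, self-contained sketch of how such tasks are declared and driven; the manager and task names here are illustrative, not Nova's:

# Minimal sketch of oslo.service periodic tasks; the manager and task names are
# illustrative, not Nova's actual ComputeManager.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class DemoManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _heal_cache(self, context):
        # Runs at most once per 60 seconds when run_periodic_tasks() is called.
        pass

    @periodic_task.periodic_task
    def _reclaim_queued_deletes(self, context):
        # Mirrors the "CONF.reclaim_instance_interval <= 0, skipping" pattern:
        # invoked on every pass, but bails out when the feature is disabled.
        return


mgr = DemoManager()
mgr.run_periodic_tasks(context=None)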
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.278678] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 757.278678] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbdc0c03-5d95-4b47-89c3-8a46814fb1bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.286401] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 757.286401] env[62383]: value = "task-2451560" [ 757.286401] env[62383]: _type = "Task" [ 757.286401] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.298285] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451560, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.374273] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451559, 'name': Rename_Task, 'duration_secs': 0.144773} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.375229] env[62383]: DEBUG nova.scheduler.client.report [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 757.383931] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 757.383931] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e82e7752-0d9b-4eb0-ba9e-046ee41f094c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.390943] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 757.390943] env[62383]: value = "task-2451561" [ 757.390943] env[62383]: _type = "Task" [ 757.390943] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.402474] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451561, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.748875] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.801110] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451560, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.829706] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "2f028680-8db4-474a-8f24-880c4702877b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.829906] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.885025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.731s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.885867] env[62383]: DEBUG nova.compute.manager [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 757.891729] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.290s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.895119] env[62383]: INFO nova.compute.claims [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.908988] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451561, 'name': PowerOnVM_Task} progress is 66%. 
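The lock lines above ('Acquiring lock "compute_resources" ...', 'acquired ... waited 39.290s', 'released ... held 2.731s') come from oslo.concurrency's lockutils, which serializes the resource tracker's claims and usage updates on a named lock and reports how long each caller waited for and held it. A small sketch of the two usual idioms; the lock name matches the log, the functions are illustrative:

# Sketch of the oslo.concurrency locking idioms behind the lock lines above.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim():
    # Serialized with every other caller holding the same named lock, which is
    # why the log shows long "waited" times under contention.
    pass


def update_usage():
    with lockutils.lock('compute_resources'):
        # Equivalent context-manager form; "acquired"/"released" debug lines
        # like the ones above are emitted around this block.
        pass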
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.933829] env[62383]: DEBUG nova.compute.manager [req-309c8dc7-aa46-4107-b3ab-ff77db8f99c6 req-4b06422b-986f-4b73-83a5-9987a8839729 service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Received event network-changed-9a97ba80-acdf-4ecf-a553-e26b0d98c82f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 757.933829] env[62383]: DEBUG nova.compute.manager [req-309c8dc7-aa46-4107-b3ab-ff77db8f99c6 req-4b06422b-986f-4b73-83a5-9987a8839729 service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Refreshing instance network info cache due to event network-changed-9a97ba80-acdf-4ecf-a553-e26b0d98c82f. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 757.933829] env[62383]: DEBUG oslo_concurrency.lockutils [req-309c8dc7-aa46-4107-b3ab-ff77db8f99c6 req-4b06422b-986f-4b73-83a5-9987a8839729 service nova] Acquiring lock "refresh_cache-d0311c29-e1ed-446f-a52b-1687b9561740" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.933829] env[62383]: DEBUG oslo_concurrency.lockutils [req-309c8dc7-aa46-4107-b3ab-ff77db8f99c6 req-4b06422b-986f-4b73-83a5-9987a8839729 service nova] Acquired lock "refresh_cache-d0311c29-e1ed-446f-a52b-1687b9561740" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.933829] env[62383]: DEBUG nova.network.neutron [req-309c8dc7-aa46-4107-b3ab-ff77db8f99c6 req-4b06422b-986f-4b73-83a5-9987a8839729 service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Refreshing network info cache for port 9a97ba80-acdf-4ecf-a553-e26b0d98c82f {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.296626] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451560, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.333499] env[62383]: DEBUG nova.compute.manager [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 758.407801] env[62383]: DEBUG nova.compute.utils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 758.414793] env[62383]: DEBUG nova.compute.manager [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 758.415019] env[62383]: DEBUG nova.network.neutron [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 758.425190] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451561, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.506244] env[62383]: DEBUG nova.policy [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7825487398fc47b5aa690bed357e4448', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba559fb5da01474791c2408ca92bbff6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 758.788791] env[62383]: DEBUG nova.network.neutron [req-309c8dc7-aa46-4107-b3ab-ff77db8f99c6 req-4b06422b-986f-4b73-83a5-9987a8839729 service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Updated VIF entry in instance network info cache for port 9a97ba80-acdf-4ecf-a553-e26b0d98c82f. 
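The nova.policy line above records a failed check of the network:attach_external_network rule against the request credentials (a non-admin member/reader token). A rough, self-contained illustration of that kind of check with oslo.policy; the registered default and credentials here are simplified placeholders, not Nova's actual policy definitions:

# Rough illustration of an oslo.policy check like the one logged above.
# The rule default and credentials are simplified placeholders.
from oslo_config import cfg
from oslo_policy import policy

CONF = cfg.CONF
enforcer = policy.Enforcer(CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

# A member/reader token fails a role:admin rule, mirroring the DEBUG line above.
allowed = enforcer.enforce('network:attach_external_network',
                           target={},
                           creds={'roles': ['reader', 'member']})
print(allowed)  # False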
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 758.789106] env[62383]: DEBUG nova.network.neutron [req-309c8dc7-aa46-4107-b3ab-ff77db8f99c6 req-4b06422b-986f-4b73-83a5-9987a8839729 service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Updating instance_info_cache with network_info: [{"id": "9a97ba80-acdf-4ecf-a553-e26b0d98c82f", "address": "fa:16:3e:f5:47:38", "network": {"id": "53e3a6ea-2b6a-4dc4-ba7e-d6985ab51dbc", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-243091242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1a290905b3384dee8a2afd416c1e4f5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a97ba80-ac", "ovs_interfaceid": "9a97ba80-acdf-4ecf-a553-e26b0d98c82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.802730] env[62383]: DEBUG oslo_vmware.api [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451560, 'name': PowerOnVM_Task, 'duration_secs': 1.235644} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.804565] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 758.804873] env[62383]: DEBUG nova.compute.manager [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 758.806032] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77b1c98-68a0-4623-85bd-a2d2e83c126c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.860035] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.914802] env[62383]: DEBUG oslo_vmware.api [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451561, 'name': PowerOnVM_Task, 'duration_secs': 1.12787} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.915452] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 758.915848] env[62383]: INFO nova.compute.manager [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Took 12.33 seconds to spawn the instance on the hypervisor. [ 758.916115] env[62383]: DEBUG nova.compute.manager [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 758.917167] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0b9b65-2997-4a93-bc36-1fcbb61c61d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.921539] env[62383]: DEBUG nova.compute.manager [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 758.945348] env[62383]: DEBUG nova.network.neutron [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Successfully created port: e527ebe3-bc3d-4e96-8325-891e543bdb39 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.297951] env[62383]: DEBUG oslo_concurrency.lockutils [req-309c8dc7-aa46-4107-b3ab-ff77db8f99c6 req-4b06422b-986f-4b73-83a5-9987a8839729 service nova] Releasing lock "refresh_cache-d0311c29-e1ed-446f-a52b-1687b9561740" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.329033] env[62383]: DEBUG oslo_concurrency.lockutils [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.446143] env[62383]: INFO nova.compute.manager [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Took 49.33 seconds to build instance. [ 759.480024] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cc93e7-bec0-4550-80bb-347612c9ac71 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.488598] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2079dcaf-3e6a-4804-a38a-50ba061d53fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.523195] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f91405-e92c-41f7-b939-6b6bcee58da4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.531809] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0abb1e18-d739-4849-92cd-467ffadcde09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.549694] env[62383]: DEBUG nova.compute.provider_tree [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.932892] env[62383]: DEBUG nova.compute.manager [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 759.948290] env[62383]: DEBUG oslo_concurrency.lockutils [None req-885e0602-8992-40e5-94c9-85ddcb52ce18 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.746s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.981438] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 759.981656] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 759.981824] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 759.982016] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 759.982180] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 759.982335] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 759.982538] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 759.982699] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 759.982870] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 759.983054] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 759.983232] env[62383]: DEBUG nova.virt.hardware [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 759.984116] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab9f936-42fb-4298-be7e-0cc5632a0055 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.998764] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f350e5-c5ad-465b-a358-34e871f176f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.053270] env[62383]: DEBUG nova.scheduler.client.report [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.145075] env[62383]: DEBUG nova.compute.manager [req-97e9faea-05db-4c5d-b526-41464a771b9a req-74d8d621-4a0b-472a-9c0d-918ec641ce14 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Received event network-changed-e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 760.145606] env[62383]: DEBUG nova.compute.manager [req-97e9faea-05db-4c5d-b526-41464a771b9a req-74d8d621-4a0b-472a-9c0d-918ec641ce14 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Refreshing instance network info cache due to event network-changed-e09c6085-476c-4c95-a6e0-1175a4786e4d. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 760.146016] env[62383]: DEBUG oslo_concurrency.lockutils [req-97e9faea-05db-4c5d-b526-41464a771b9a req-74d8d621-4a0b-472a-9c0d-918ec641ce14 service nova] Acquiring lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.146409] env[62383]: DEBUG oslo_concurrency.lockutils [req-97e9faea-05db-4c5d-b526-41464a771b9a req-74d8d621-4a0b-472a-9c0d-918ec641ce14 service nova] Acquired lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.147209] env[62383]: DEBUG nova.network.neutron [req-97e9faea-05db-4c5d-b526-41464a771b9a req-74d8d621-4a0b-472a-9c0d-918ec641ce14 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Refreshing network info cache for port e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.494039] env[62383]: DEBUG nova.compute.manager [req-4477c8bf-fdf4-4fcb-82cd-28c2a3d9afeb req-d86d0130-9659-48bb-a849-52395689c650 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Received event network-vif-plugged-e527ebe3-bc3d-4e96-8325-891e543bdb39 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 760.494313] env[62383]: DEBUG oslo_concurrency.lockutils [req-4477c8bf-fdf4-4fcb-82cd-28c2a3d9afeb req-d86d0130-9659-48bb-a849-52395689c650 service nova] Acquiring lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.494512] env[62383]: DEBUG oslo_concurrency.lockutils [req-4477c8bf-fdf4-4fcb-82cd-28c2a3d9afeb req-d86d0130-9659-48bb-a849-52395689c650 service nova] Lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.494677] env[62383]: DEBUG oslo_concurrency.lockutils [req-4477c8bf-fdf4-4fcb-82cd-28c2a3d9afeb req-d86d0130-9659-48bb-a849-52395689c650 service nova] Lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.494902] env[62383]: DEBUG nova.compute.manager [req-4477c8bf-fdf4-4fcb-82cd-28c2a3d9afeb req-d86d0130-9659-48bb-a849-52395689c650 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] No waiting events found dispatching network-vif-plugged-e527ebe3-bc3d-4e96-8325-891e543bdb39 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 760.495126] env[62383]: WARNING nova.compute.manager [req-4477c8bf-fdf4-4fcb-82cd-28c2a3d9afeb req-d86d0130-9659-48bb-a849-52395689c650 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Received unexpected event network-vif-plugged-e527ebe3-bc3d-4e96-8325-891e543bdb39 for instance with vm_state building and task_state spawning. 
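[editor's note] The "Acquiring lock ... / Lock ... acquired by ... waited Ns / Lock ... released ... held Ns" lines above and throughout this trace are emitted by oslo.concurrency's lockutils (the synchronized decorator around lines 402/407/421 and the lock context manager around lines 310/313/331 of lockutils.py). A minimal, illustrative sketch of that pattern follows; the lock names and function bodies are placeholders for illustration, not Nova's actual resource-tracker or network-cache code.

    # Sketch only: reproduces the lockutils acquire/wait/hold/release logging
    # pattern seen in this trace, under assumed (illustrative) lock names.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs only while the named semaphore is held; lockutils logs the
        # acquire/wait/hold timings at DEBUG, which is what appears above.
        return 'claimed %s' % instance_uuid

    def refresh_cache(instance_uuid):
        # Context-manager form for an ad-hoc critical section, matching the
        # Acquiring/Acquired/Releasing "refresh_cache-<uuid>" lines in the log.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # read or update the cached network info here

    if __name__ == '__main__':
        print(claim_resources('1a740010-ddd0-4df6-8ae6-02f1ed50137f'))
        refresh_cache('1e367665-1d4b-4686-ac79-c946423c1762')

[end editor's note]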
[ 760.559978] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.560508] env[62383]: DEBUG nova.compute.manager [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 760.563511] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.010s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.564986] env[62383]: INFO nova.compute.claims [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 760.581018] env[62383]: DEBUG nova.network.neutron [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Successfully updated port: e527ebe3-bc3d-4e96-8325-891e543bdb39 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 760.864184] env[62383]: DEBUG nova.network.neutron [req-97e9faea-05db-4c5d-b526-41464a771b9a req-74d8d621-4a0b-472a-9c0d-918ec641ce14 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updated VIF entry in instance network info cache for port e09c6085-476c-4c95-a6e0-1175a4786e4d. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 760.864556] env[62383]: DEBUG nova.network.neutron [req-97e9faea-05db-4c5d-b526-41464a771b9a req-74d8d621-4a0b-472a-9c0d-918ec641ce14 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updating instance_info_cache with network_info: [{"id": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "address": "fa:16:3e:24:0a:9f", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09c6085-47", "ovs_interfaceid": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.922014] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "a10f5b03-c45b-4cc2-923f-3227665d236c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.922313] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "a10f5b03-c45b-4cc2-923f-3227665d236c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.922541] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "a10f5b03-c45b-4cc2-923f-3227665d236c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.922728] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "a10f5b03-c45b-4cc2-923f-3227665d236c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.922903] env[62383]: DEBUG 
oslo_concurrency.lockutils [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "a10f5b03-c45b-4cc2-923f-3227665d236c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.924907] env[62383]: INFO nova.compute.manager [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Terminating instance [ 761.075043] env[62383]: DEBUG nova.compute.utils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.079486] env[62383]: DEBUG nova.compute.manager [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 761.079486] env[62383]: DEBUG nova.network.neutron [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.083810] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.083957] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.084633] env[62383]: DEBUG nova.network.neutron [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 761.119556] env[62383]: DEBUG nova.policy [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6bcce66563194bab86486a66106ef770', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75346deaf9ad40fa925d4aff9fdff2cc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 761.367085] env[62383]: DEBUG oslo_concurrency.lockutils [req-97e9faea-05db-4c5d-b526-41464a771b9a req-74d8d621-4a0b-472a-9c0d-918ec641ce14 service nova] Releasing lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.420495] env[62383]: DEBUG nova.network.neutron [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Successfully created port: 66ff13c1-430d-414e-a83a-3d9e7536688a {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 761.429655] env[62383]: DEBUG nova.compute.manager [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 761.430020] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 761.431074] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee9a6c2-e612-4675-a586-18bc1ce8ef2a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.440345] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 761.440639] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2867c87-88d2-45dd-81d5-d77fc232e625 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.464673] env[62383]: DEBUG oslo_vmware.api [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 761.464673] env[62383]: value = "task-2451562" [ 761.464673] env[62383]: _type = "Task" [ 761.464673] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.475086] env[62383]: DEBUG oslo_vmware.api [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451562, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.580768] env[62383]: DEBUG nova.compute.utils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.646092] env[62383]: DEBUG nova.network.neutron [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.833521] env[62383]: DEBUG nova.network.neutron [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updating instance_info_cache with network_info: [{"id": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "address": "fa:16:3e:ec:51:f4", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape527ebe3-bc", "ovs_interfaceid": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.978757] env[62383]: DEBUG oslo_vmware.api [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451562, 'name': PowerOffVM_Task, 'duration_secs': 0.223414} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.979114] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 761.979364] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 761.983117] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-029d7973-567e-47d5-aa5f-55c2ea68f673 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.054252] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 762.054458] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 762.054653] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleting the datastore file [datastore2] a10f5b03-c45b-4cc2-923f-3227665d236c {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.054919] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ae58eca-995b-424b-ae23-c668bef98b3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.061893] env[62383]: DEBUG oslo_vmware.api [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 762.061893] env[62383]: value = "task-2451564" [ 762.061893] env[62383]: _type = "Task" [ 762.061893] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.071977] env[62383]: DEBUG oslo_vmware.api [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451564, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.083892] env[62383]: DEBUG nova.compute.manager [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 762.180629] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62508f4d-ec00-4330-a5bd-f6d0cbb7bc2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.189534] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65785a2e-e89e-4b3b-8853-ff9b60b6b64e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.224278] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce0b863-f336-4274-99c6-f5a0ed654ccd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.231956] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2949a26b-cecd-4450-a88c-20f244a0cd0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.248186] env[62383]: DEBUG nova.compute.provider_tree [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.336433] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.337083] env[62383]: DEBUG nova.compute.manager [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Instance network_info: |[{"id": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "address": "fa:16:3e:ec:51:f4", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tape527ebe3-bc", "ovs_interfaceid": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 762.337330] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:51:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e527ebe3-bc3d-4e96-8325-891e543bdb39', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.344910] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Creating folder: Project (ba559fb5da01474791c2408ca92bbff6). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 762.345216] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47e5dab8-4f61-4f81-ab52-b49b7db8f55f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.356579] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Created folder: Project (ba559fb5da01474791c2408ca92bbff6) in parent group-v496304. [ 762.356793] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Creating folder: Instances. Parent ref: group-v496465. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 762.357055] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5050fca-0bca-4580-b904-f46cee8f7e7f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.366981] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Created folder: Instances in parent group-v496465. [ 762.367173] env[62383]: DEBUG oslo.service.loopingcall [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.367359] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 762.367563] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6d31c89-11b1-4569-8bdb-31e5494dfcf6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.388224] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.388224] env[62383]: value = "task-2451567" [ 762.388224] env[62383]: _type = "Task" [ 762.388224] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.398829] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451567, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.522666] env[62383]: DEBUG nova.compute.manager [req-56b7d31b-4b7c-4bbd-9362-26ef04134761 req-dd4c3463-5173-4d69-9315-ef0163a0a708 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Received event network-changed-e527ebe3-bc3d-4e96-8325-891e543bdb39 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 762.522864] env[62383]: DEBUG nova.compute.manager [req-56b7d31b-4b7c-4bbd-9362-26ef04134761 req-dd4c3463-5173-4d69-9315-ef0163a0a708 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Refreshing instance network info cache due to event network-changed-e527ebe3-bc3d-4e96-8325-891e543bdb39. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 762.523616] env[62383]: DEBUG oslo_concurrency.lockutils [req-56b7d31b-4b7c-4bbd-9362-26ef04134761 req-dd4c3463-5173-4d69-9315-ef0163a0a708 service nova] Acquiring lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.523616] env[62383]: DEBUG oslo_concurrency.lockutils [req-56b7d31b-4b7c-4bbd-9362-26ef04134761 req-dd4c3463-5173-4d69-9315-ef0163a0a708 service nova] Acquired lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.523616] env[62383]: DEBUG nova.network.neutron [req-56b7d31b-4b7c-4bbd-9362-26ef04134761 req-dd4c3463-5173-4d69-9315-ef0163a0a708 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Refreshing network info cache for port e527ebe3-bc3d-4e96-8325-891e543bdb39 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 762.571872] env[62383]: DEBUG oslo_vmware.api [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451564, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187099} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.572154] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 762.572334] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 762.572513] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 762.572683] env[62383]: INFO nova.compute.manager [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 762.572976] env[62383]: DEBUG oslo.service.loopingcall [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.573238] env[62383]: DEBUG nova.compute.manager [-] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 762.573336] env[62383]: DEBUG nova.network.neutron [-] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 762.753365] env[62383]: DEBUG nova.scheduler.client.report [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.904563] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451567, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.097264] env[62383]: DEBUG nova.compute.manager [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 763.141286] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:24:26Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1081965563',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1031957495',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 763.141534] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.141692] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 763.141876] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.142255] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 763.142255] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 763.142386] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 
tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 763.142539] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 763.142700] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 763.142858] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 763.143040] env[62383]: DEBUG nova.virt.hardware [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 763.144044] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7eb2e7-a90d-4ef7-99bd-647c45cfb7e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.153731] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ebd68b-78a9-4dcc-be9f-f7c6a00c1741 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.246110] env[62383]: DEBUG nova.network.neutron [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Successfully updated port: 66ff13c1-430d-414e-a83a-3d9e7536688a {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 763.258943] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.695s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.259489] env[62383]: DEBUG nova.compute.manager [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 763.266704] env[62383]: DEBUG oslo_concurrency.lockutils [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.052s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.266928] env[62383]: DEBUG nova.objects.instance [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lazy-loading 'resources' on Instance uuid f28beb17-8455-49d3-8be0-7636b9abe4e8 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 763.327782] env[62383]: DEBUG nova.network.neutron [req-56b7d31b-4b7c-4bbd-9362-26ef04134761 req-dd4c3463-5173-4d69-9315-ef0163a0a708 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updated VIF entry in instance network info cache for port e527ebe3-bc3d-4e96-8325-891e543bdb39. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 763.328165] env[62383]: DEBUG nova.network.neutron [req-56b7d31b-4b7c-4bbd-9362-26ef04134761 req-dd4c3463-5173-4d69-9315-ef0163a0a708 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updating instance_info_cache with network_info: [{"id": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "address": "fa:16:3e:ec:51:f4", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape527ebe3-bc", "ovs_interfaceid": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.400779] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451567, 'name': CreateVM_Task, 'duration_secs': 0.589561} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.400779] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 763.401482] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.401587] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.401914] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 763.402187] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b8ed472-2032-4a94-adfc-2143042cc7bc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.407062] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 763.407062] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52824378-b257-eaff-ea9d-8f6f81e678e2" [ 763.407062] env[62383]: _type = "Task" [ 763.407062] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.416212] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52824378-b257-eaff-ea9d-8f6f81e678e2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.429769] env[62383]: DEBUG nova.network.neutron [-] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.748847] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "refresh_cache-e41f5c22-44e0-4de8-a4d0-865fe2c6febd" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.748847] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquired lock "refresh_cache-e41f5c22-44e0-4de8-a4d0-865fe2c6febd" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.749064] env[62383]: DEBUG nova.network.neutron [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.773183] env[62383]: DEBUG nova.compute.utils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 763.775354] env[62383]: DEBUG nova.compute.manager [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 763.775354] env[62383]: DEBUG nova.network.neutron [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 763.819324] env[62383]: DEBUG nova.policy [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bda8cb7b1005458ca6fc7e5ca6882e6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '603ba5501c904542b6ff0935f620e6da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 763.831758] env[62383]: DEBUG oslo_concurrency.lockutils [req-56b7d31b-4b7c-4bbd-9362-26ef04134761 req-dd4c3463-5173-4d69-9315-ef0163a0a708 service nova] Releasing lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.917942] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52824378-b257-eaff-ea9d-8f6f81e678e2, 'name': SearchDatastore_Task, 'duration_secs': 0.010082} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.920572] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.921518] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 763.921518] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.921518] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.921518] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 763.922576] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a70e8ee-2c40-4b74-82e3-b7a00c1ea5ba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.931968] env[62383]: INFO nova.compute.manager [-] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Took 1.36 seconds to deallocate network for instance. [ 763.932382] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 763.932574] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 763.937534] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-733828b9-b50a-4da0-9259-93c2284c53bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.947141] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 763.947141] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527b2390-240c-3620-0085-fbc7f2fbc606" [ 763.947141] env[62383]: _type = "Task" [ 763.947141] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.956432] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527b2390-240c-3620-0085-fbc7f2fbc606, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.118708] env[62383]: DEBUG nova.network.neutron [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Successfully created port: 79458cb2-668a-4c04-882f-c00f465ccd9d {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 764.279143] env[62383]: DEBUG nova.compute.manager [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 764.291761] env[62383]: DEBUG nova.network.neutron [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.384217] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431e1854-278e-4249-a84b-675628652d13 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.393381] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f1f316-83fc-4fd6-9bb1-df1d9e9f205e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.430014] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e20e2d-d79f-43b3-a2c3-1aebd8918820 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.439404] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81854c56-353b-4204-af4b-b869af1424be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.447299] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.460600] env[62383]: DEBUG nova.compute.provider_tree [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.468444] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527b2390-240c-3620-0085-fbc7f2fbc606, 'name': SearchDatastore_Task, 'duration_secs': 0.010128} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.469275] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9f2557b-ffdd-4b61-b231-d8cec6f87c84 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.474780] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 764.474780] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52560218-d3a1-db25-90f9-b8145266302c" [ 764.474780] env[62383]: _type = "Task" [ 764.474780] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.487198] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52560218-d3a1-db25-90f9-b8145266302c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.518842] env[62383]: DEBUG nova.network.neutron [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Updating instance_info_cache with network_info: [{"id": "66ff13c1-430d-414e-a83a-3d9e7536688a", "address": "fa:16:3e:b3:22:55", "network": {"id": "15922417-5941-4372-b068-7f6b0a8c7335", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1040646301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75346deaf9ad40fa925d4aff9fdff2cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ff13c1-43", "ovs_interfaceid": "66ff13c1-430d-414e-a83a-3d9e7536688a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.648743] env[62383]: DEBUG nova.compute.manager [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Received event network-vif-deleted-45641616-950c-40e9-8a0f-76d8fd08bc82 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 764.648981] env[62383]: DEBUG nova.compute.manager [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Received event network-vif-plugged-66ff13c1-430d-414e-a83a-3d9e7536688a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 764.649309] env[62383]: DEBUG oslo_concurrency.lockutils [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] Acquiring lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.649568] env[62383]: DEBUG oslo_concurrency.lockutils [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] Lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: 
waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.649813] env[62383]: DEBUG oslo_concurrency.lockutils [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] Lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.650045] env[62383]: DEBUG nova.compute.manager [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] No waiting events found dispatching network-vif-plugged-66ff13c1-430d-414e-a83a-3d9e7536688a {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 764.650260] env[62383]: WARNING nova.compute.manager [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Received unexpected event network-vif-plugged-66ff13c1-430d-414e-a83a-3d9e7536688a for instance with vm_state building and task_state spawning. [ 764.650462] env[62383]: DEBUG nova.compute.manager [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Received event network-changed-66ff13c1-430d-414e-a83a-3d9e7536688a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 764.650658] env[62383]: DEBUG nova.compute.manager [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Refreshing instance network info cache due to event network-changed-66ff13c1-430d-414e-a83a-3d9e7536688a. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 764.650868] env[62383]: DEBUG oslo_concurrency.lockutils [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] Acquiring lock "refresh_cache-e41f5c22-44e0-4de8-a4d0-865fe2c6febd" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.912363] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquiring lock "12e6baef-0614-4a12-b958-30b0f56fe486" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.912692] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "12e6baef-0614-4a12-b958-30b0f56fe486" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.963954] env[62383]: DEBUG nova.scheduler.client.report [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 764.986364] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52560218-d3a1-db25-90f9-b8145266302c, 'name': SearchDatastore_Task, 'duration_secs': 0.009962} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.986623] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.986872] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4cd9c7be-c5f4-460b-a9e2-e8f778076947/4cd9c7be-c5f4-460b-a9e2-e8f778076947.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 764.987139] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10f43a42-9802-44ca-aa20-d38ccd409c3c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.993119] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 764.993119] env[62383]: value = "task-2451568" [ 764.993119] env[62383]: _type = "Task" [ 764.993119] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.000466] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451568, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.021046] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Releasing lock "refresh_cache-e41f5c22-44e0-4de8-a4d0-865fe2c6febd" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.021406] env[62383]: DEBUG nova.compute.manager [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Instance network_info: |[{"id": "66ff13c1-430d-414e-a83a-3d9e7536688a", "address": "fa:16:3e:b3:22:55", "network": {"id": "15922417-5941-4372-b068-7f6b0a8c7335", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1040646301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75346deaf9ad40fa925d4aff9fdff2cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ff13c1-43", "ovs_interfaceid": "66ff13c1-430d-414e-a83a-3d9e7536688a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 765.021719] env[62383]: DEBUG oslo_concurrency.lockutils [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] Acquired lock "refresh_cache-e41f5c22-44e0-4de8-a4d0-865fe2c6febd" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.021922] env[62383]: DEBUG nova.network.neutron [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Refreshing network info cache for port 66ff13c1-430d-414e-a83a-3d9e7536688a {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 765.023219] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:22:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0636c3f6-fcb7-4954-ab07-c5cd0dee37b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66ff13c1-430d-414e-a83a-3d9e7536688a', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.032316] env[62383]: DEBUG oslo.service.loopingcall [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 
tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.035507] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 765.036143] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07aed39b-9c76-4449-be55-8bf5df36a2b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.057210] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.057210] env[62383]: value = "task-2451569" [ 765.057210] env[62383]: _type = "Task" [ 765.057210] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.066443] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451569, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.265616] env[62383]: DEBUG nova.network.neutron [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Updated VIF entry in instance network info cache for port 66ff13c1-430d-414e-a83a-3d9e7536688a. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 765.266010] env[62383]: DEBUG nova.network.neutron [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Updating instance_info_cache with network_info: [{"id": "66ff13c1-430d-414e-a83a-3d9e7536688a", "address": "fa:16:3e:b3:22:55", "network": {"id": "15922417-5941-4372-b068-7f6b0a8c7335", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1040646301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75346deaf9ad40fa925d4aff9fdff2cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ff13c1-43", "ovs_interfaceid": "66ff13c1-430d-414e-a83a-3d9e7536688a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.293749] env[62383]: DEBUG nova.compute.manager [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 765.333144] env[62383]: DEBUG nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 765.333585] env[62383]: DEBUG nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.333887] env[62383]: DEBUG nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 765.334256] env[62383]: DEBUG nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.334549] env[62383]: DEBUG nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 765.334869] env[62383]: DEBUG nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 765.335335] env[62383]: DEBUG nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 765.336550] env[62383]: DEBUG nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 765.336550] env[62383]: DEBUG 
nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 765.336550] env[62383]: DEBUG nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 765.336787] env[62383]: DEBUG nova.virt.hardware [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 765.338071] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2993162-60b9-4864-a98f-1e19214bc938 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.352834] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40d8de7-cb04-456e-8f0d-6316cbb51d40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.415780] env[62383]: DEBUG nova.compute.manager [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 765.469901] env[62383]: DEBUG oslo_concurrency.lockutils [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.203s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.472389] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.651s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.474038] env[62383]: INFO nova.compute.claims [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.504531] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451568, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498629} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.504916] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4cd9c7be-c5f4-460b-a9e2-e8f778076947/4cd9c7be-c5f4-460b-a9e2-e8f778076947.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 765.505243] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.505700] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d29b5699-45b4-46c0-b508-3715b558e5c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.510780] env[62383]: INFO nova.scheduler.client.report [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Deleted allocations for instance f28beb17-8455-49d3-8be0-7636b9abe4e8 [ 765.517659] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 765.517659] env[62383]: value = "task-2451570" [ 765.517659] env[62383]: _type = "Task" [ 765.517659] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.527681] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451570, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.566904] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451569, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.768971] env[62383]: DEBUG oslo_concurrency.lockutils [req-719f5478-e30f-4bfa-89d5-b2c52a9a6fbc req-a04479de-2d15-4706-a6f2-c4fef89d7b21 service nova] Releasing lock "refresh_cache-e41f5c22-44e0-4de8-a4d0-865fe2c6febd" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.938115] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.944938] env[62383]: DEBUG nova.network.neutron [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Successfully updated port: 79458cb2-668a-4c04-882f-c00f465ccd9d {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 766.019218] env[62383]: DEBUG oslo_concurrency.lockutils [None req-485206c1-95f0-455c-a18d-cf56bde39f38 tempest-ServersTestJSON-482144226 tempest-ServersTestJSON-482144226-project-member] Lock "f28beb17-8455-49d3-8be0-7636b9abe4e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.776s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.029103] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451570, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085485} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.029394] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 766.030179] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2645f35f-8598-41b2-85e4-93950a833d7a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.052587] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 4cd9c7be-c5f4-460b-a9e2-e8f778076947/4cd9c7be-c5f4-460b-a9e2-e8f778076947.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 766.053097] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10c4ced1-3a51-4c60-968b-3c93d37422ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.074911] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451569, 'name': CreateVM_Task, 'duration_secs': 0.590707} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.075992] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 766.076326] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 766.076326] env[62383]: value = "task-2451571" [ 766.076326] env[62383]: _type = "Task" [ 766.076326] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.076938] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.077112] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.077427] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 766.077714] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22f55573-b548-4035-95b3-344aecbf0760 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.084887] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 766.084887] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525f61f8-4a90-26bc-73da-93b762d205ae" [ 766.084887] env[62383]: _type = "Task" [ 766.084887] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.088449] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451571, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.097819] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525f61f8-4a90-26bc-73da-93b762d205ae, 'name': SearchDatastore_Task, 'duration_secs': 0.00905} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.098087] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.098314] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.098535] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.098709] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.098892] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 766.099147] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce978b10-d606-437c-a49f-f775efe5a09b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.105782] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 766.106009] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 766.106697] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af727ca1-8153-4cd8-a2cc-b9733f57330d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.111183] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 766.111183] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5254a0a5-5828-6fac-a356-72d1d4e72896" [ 766.111183] env[62383]: _type = "Task" [ 766.111183] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.118851] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5254a0a5-5828-6fac-a356-72d1d4e72896, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.447170] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.447515] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.447515] env[62383]: DEBUG nova.network.neutron [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 766.590553] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451571, 'name': ReconfigVM_Task, 'duration_secs': 0.325953} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.593146] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 4cd9c7be-c5f4-460b-a9e2-e8f778076947/4cd9c7be-c5f4-460b-a9e2-e8f778076947.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.594084] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b113e186-b0ad-4736-9893-cd61937e1722 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.601798] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 766.601798] env[62383]: value = "task-2451572" [ 766.601798] env[62383]: _type = "Task" [ 766.601798] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.617811] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451572, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.632578] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5254a0a5-5828-6fac-a356-72d1d4e72896, 'name': SearchDatastore_Task, 'duration_secs': 0.007916} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.633616] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-172eff28-8816-42f7-9e2d-e547fcd8fca0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.643892] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 766.643892] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f20040-d56e-c5e3-a070-a47a92eea68d" [ 766.643892] env[62383]: _type = "Task" [ 766.643892] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.652583] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f20040-d56e-c5e3-a070-a47a92eea68d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.679480] env[62383]: DEBUG nova.compute.manager [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Received event network-vif-plugged-79458cb2-668a-4c04-882f-c00f465ccd9d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 766.679711] env[62383]: DEBUG oslo_concurrency.lockutils [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] Acquiring lock "93234e99-268f-491e-96bd-a77f4c9f164b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.679938] env[62383]: DEBUG oslo_concurrency.lockutils [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] Lock "93234e99-268f-491e-96bd-a77f4c9f164b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.680083] env[62383]: DEBUG oslo_concurrency.lockutils [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] Lock "93234e99-268f-491e-96bd-a77f4c9f164b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.680250] env[62383]: DEBUG nova.compute.manager [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] No waiting events found dispatching network-vif-plugged-79458cb2-668a-4c04-882f-c00f465ccd9d {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 766.680411] env[62383]: WARNING nova.compute.manager [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Received unexpected event network-vif-plugged-79458cb2-668a-4c04-882f-c00f465ccd9d for instance with vm_state building and task_state spawning. [ 766.680575] env[62383]: DEBUG nova.compute.manager [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Received event network-changed-79458cb2-668a-4c04-882f-c00f465ccd9d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 766.680740] env[62383]: DEBUG nova.compute.manager [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Refreshing instance network info cache due to event network-changed-79458cb2-668a-4c04-882f-c00f465ccd9d. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 766.680898] env[62383]: DEBUG oslo_concurrency.lockutils [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] Acquiring lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.967130] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978b7945-467c-4908-883f-314597d6dc65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.977058] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec1ca1a-0d19-4012-8965-4a19e77b2519 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.009173] env[62383]: DEBUG nova.network.neutron [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.011932] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608e36db-6502-48d1-a1ce-d41b4c785078 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.019455] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7de11a8-5e34-4a74-a266-aa6252e9f40f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.034343] env[62383]: DEBUG nova.compute.provider_tree [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.111717] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451572, 'name': Rename_Task, 'duration_secs': 0.163647} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.112046] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 767.112373] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c230db52-fedd-4b19-9da8-b42fc30b7ed6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.118664] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 767.118664] env[62383]: value = "task-2451573" [ 767.118664] env[62383]: _type = "Task" [ 767.118664] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.126374] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451573, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.157017] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f20040-d56e-c5e3-a070-a47a92eea68d, 'name': SearchDatastore_Task, 'duration_secs': 0.012204} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.157335] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.157568] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] e41f5c22-44e0-4de8-a4d0-865fe2c6febd/e41f5c22-44e0-4de8-a4d0-865fe2c6febd.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 767.157821] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e75ab2b7-a7b3-4478-8c4c-dd90928098ef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.166445] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 767.166445] env[62383]: value = "task-2451574" [ 767.166445] env[62383]: _type = "Task" [ 767.166445] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.177468] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451574, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.211313] env[62383]: DEBUG nova.network.neutron [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance_info_cache with network_info: [{"id": "79458cb2-668a-4c04-882f-c00f465ccd9d", "address": "fa:16:3e:06:eb:ec", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79458cb2-66", "ovs_interfaceid": "79458cb2-668a-4c04-882f-c00f465ccd9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.540196] env[62383]: DEBUG nova.scheduler.client.report [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 767.636240] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451573, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.681450] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451574, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.713749] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.714115] env[62383]: DEBUG nova.compute.manager [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Instance network_info: |[{"id": "79458cb2-668a-4c04-882f-c00f465ccd9d", "address": "fa:16:3e:06:eb:ec", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79458cb2-66", "ovs_interfaceid": "79458cb2-668a-4c04-882f-c00f465ccd9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 767.716420] env[62383]: DEBUG oslo_concurrency.lockutils [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] Acquired lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.716420] env[62383]: DEBUG nova.network.neutron [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Refreshing network info cache for port 79458cb2-668a-4c04-882f-c00f465ccd9d {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 767.716420] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:eb:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb971244-43ba-41b4-a6a2-a4558548012c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79458cb2-668a-4c04-882f-c00f465ccd9d', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 767.726019] env[62383]: DEBUG oslo.service.loopingcall [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 
tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.727863] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 767.727863] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7776252e-f89f-4800-aed4-636487081030 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.747687] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 767.747687] env[62383]: value = "task-2451575" [ 767.747687] env[62383]: _type = "Task" [ 767.747687] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.756852] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451575, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.046012] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.046230] env[62383]: DEBUG nova.compute.manager [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 768.051727] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.103s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.053241] env[62383]: INFO nova.compute.claims [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.130250] env[62383]: DEBUG oslo_vmware.api [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2451573, 'name': PowerOnVM_Task, 'duration_secs': 0.761713} completed successfully. 
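The PowerOnVM_Task records above follow the usual oslo.vmware task lifecycle: the driver invokes the vSphere task, logs "Waiting for the task", then polls until the task reports success. A schematic version of that poll loop follows; it is a sketch only, with get_task_info() standing in for the property read the real _poll_task performs, and the states mirroring vSphere TaskInfo states:

```python
import time


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vSphere-style task until it finishes (illustrative sketch only)."""
    while True:
        info = get_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 66}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # 'queued' or 'running': report progress and keep polling, matching the
        # "Task: {...} progress is N%" lines in the log.
        print('Task %s progress is %s%%' % (task_ref, info.get('progress', 0)))
        time.sleep(poll_interval)
```

Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task and the SearchDatastore_Task calls in this section all go through the same wait-and-poll path, which is why each one alternates "progress is N%" lines with a final "completed successfully" record.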
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.130582] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 768.130812] env[62383]: INFO nova.compute.manager [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Took 8.20 seconds to spawn the instance on the hypervisor. [ 768.131243] env[62383]: DEBUG nova.compute.manager [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 768.132024] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6634719d-ee64-499e-92a1-6eedbb8615b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.180055] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451574, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545857} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.180326] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] e41f5c22-44e0-4de8-a4d0-865fe2c6febd/e41f5c22-44e0-4de8-a4d0-865fe2c6febd.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 768.180663] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 768.180796] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f7e59f4-3785-406e-9749-096a32bd6bb1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.188533] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 768.188533] env[62383]: value = "task-2451576" [ 768.188533] env[62383]: _type = "Task" [ 768.188533] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.198259] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451576, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.257471] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451575, 'name': CreateVM_Task, 'duration_secs': 0.401249} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.257695] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 768.258470] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.259884] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.259884] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 768.259884] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42f8c9f2-fac8-40a8-af54-6677ba72d7bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.266416] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 768.266416] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]521caf38-547b-5d87-5896-aa396c5700fd" [ 768.266416] env[62383]: _type = "Task" [ 768.266416] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.274030] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521caf38-547b-5d87-5896-aa396c5700fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.484298] env[62383]: DEBUG nova.network.neutron [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updated VIF entry in instance network info cache for port 79458cb2-668a-4c04-882f-c00f465ccd9d. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 768.484665] env[62383]: DEBUG nova.network.neutron [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance_info_cache with network_info: [{"id": "79458cb2-668a-4c04-882f-c00f465ccd9d", "address": "fa:16:3e:06:eb:ec", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79458cb2-66", "ovs_interfaceid": "79458cb2-668a-4c04-882f-c00f465ccd9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.561940] env[62383]: DEBUG nova.compute.utils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 768.563997] env[62383]: DEBUG nova.compute.manager [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 768.564244] env[62383]: DEBUG nova.network.neutron [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 768.603865] env[62383]: DEBUG nova.policy [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7111f98afebe43d48615445b7fd4596d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28e37dc42ac74824b43bd4b120a52674', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 768.648405] env[62383]: INFO nova.compute.manager [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Took 51.63 seconds to build instance. [ 768.698525] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451576, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061654} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.698843] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 768.699662] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3878b5-cfdf-4009-be6d-a32056916fa4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.725549] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] e41f5c22-44e0-4de8-a4d0-865fe2c6febd/e41f5c22-44e0-4de8-a4d0-865fe2c6febd.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 768.725705] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f0aa509-9b03-4662-8b70-d6d6fdede80b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.747021] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 768.747021] env[62383]: value = "task-2451577" [ 768.747021] env[62383]: _type = "Task" [ 768.747021] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.755500] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451577, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.774875] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521caf38-547b-5d87-5896-aa396c5700fd, 'name': SearchDatastore_Task, 'duration_secs': 0.025172} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.775284] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.775581] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.775881] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.776096] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.776332] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.776673] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aca752bb-af6e-4b01-aed8-b15c11c580f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.794284] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.794786] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.795564] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-affbecc6-6cfd-4467-ac67-1a61fb9793ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.803191] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 768.803191] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52853554-eb51-22f7-4e06-5ba918da015b" [ 768.803191] env[62383]: _type = "Task" [ 768.803191] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.813449] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52853554-eb51-22f7-4e06-5ba918da015b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.883220] env[62383]: DEBUG nova.network.neutron [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Successfully created port: d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.987273] env[62383]: DEBUG oslo_concurrency.lockutils [req-3f5fd8cf-6405-4309-ba32-a0dbd0f9b5fa req-760f0772-26d8-4dee-b19a-6f13b93335ed service nova] Releasing lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.068274] env[62383]: DEBUG nova.compute.manager [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 769.153484] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06715be2-b6d0-4106-a392-1dd163128527 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.780s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.256624] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451577, 'name': ReconfigVM_Task, 'duration_secs': 0.436918} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.259220] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Reconfigured VM instance instance-00000036 to attach disk [datastore2] e41f5c22-44e0-4de8-a4d0-865fe2c6febd/e41f5c22-44e0-4de8-a4d0-865fe2c6febd.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 769.259531] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62383) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 769.260640] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-f971b140-ff1a-478a-a5f8-19cb41e77210 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.267350] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 769.267350] env[62383]: value = "task-2451578" [ 769.267350] env[62383]: _type = "Task" [ 769.267350] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.274359] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451578, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.315749] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52853554-eb51-22f7-4e06-5ba918da015b, 'name': SearchDatastore_Task, 'duration_secs': 0.026944} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.321356] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ca2c6c7-e0da-4fbe-bc8a-3208adc7d3c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.327859] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 769.327859] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520ba6d0-454a-2a42-784f-fa729372ccf7" [ 769.327859] env[62383]: _type = "Task" [ 769.327859] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.337236] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520ba6d0-454a-2a42-784f-fa729372ccf7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.716901] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d53510-adf5-447e-92e8-a75502170fb6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.724280] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade80d3b-7879-4845-89aa-6e52fb264c91 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.754705] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d85ae0-8a8e-4943-ae97-599fce7870dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.763114] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a3cc99-b857-4542-b136-976e1e86ea2d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.776632] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451578, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.054865} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.783857] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62383) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 769.784497] env[62383]: DEBUG nova.compute.provider_tree [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 769.786169] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf2f2dd-f450-44ad-ae16-221fbdb457fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.812260] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] e41f5c22-44e0-4de8-a4d0-865fe2c6febd/ephemeral_0.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.813185] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47271687-42da-4f61-a37d-0c595811fc3f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.833627] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 769.833627] env[62383]: value = "task-2451579" [ 769.833627] env[62383]: _type = "Task" [ 769.833627] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.840545] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520ba6d0-454a-2a42-784f-fa729372ccf7, 'name': SearchDatastore_Task, 'duration_secs': 0.009752} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.841138] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.841403] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 93234e99-268f-491e-96bd-a77f4c9f164b/93234e99-268f-491e-96bd-a77f4c9f164b.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 769.841893] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f458d928-0a82-45fb-b090-b71cab9ef5e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.846649] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451579, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.850963] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 769.850963] env[62383]: value = "task-2451580" [ 769.850963] env[62383]: _type = "Task" [ 769.850963] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.858798] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451580, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.872588] env[62383]: DEBUG nova.compute.manager [req-9e901752-6b61-45a3-befd-3bb7e1169650 req-1056d856-7f8c-460d-a995-5484072e490e service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Received event network-changed-e527ebe3-bc3d-4e96-8325-891e543bdb39 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 769.873147] env[62383]: DEBUG nova.compute.manager [req-9e901752-6b61-45a3-befd-3bb7e1169650 req-1056d856-7f8c-460d-a995-5484072e490e service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Refreshing instance network info cache due to event network-changed-e527ebe3-bc3d-4e96-8325-891e543bdb39. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 769.873147] env[62383]: DEBUG oslo_concurrency.lockutils [req-9e901752-6b61-45a3-befd-3bb7e1169650 req-1056d856-7f8c-460d-a995-5484072e490e service nova] Acquiring lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.873147] env[62383]: DEBUG oslo_concurrency.lockutils [req-9e901752-6b61-45a3-befd-3bb7e1169650 req-1056d856-7f8c-460d-a995-5484072e490e service nova] Acquired lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.873505] env[62383]: DEBUG nova.network.neutron [req-9e901752-6b61-45a3-befd-3bb7e1169650 req-1056d856-7f8c-460d-a995-5484072e490e service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Refreshing network info cache for port e527ebe3-bc3d-4e96-8325-891e543bdb39 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 770.081053] env[62383]: DEBUG nova.compute.manager [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 770.109528] env[62383]: DEBUG nova.virt.hardware [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 770.109958] env[62383]: DEBUG nova.virt.hardware [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.109958] env[62383]: DEBUG nova.virt.hardware [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 770.110215] env[62383]: DEBUG nova.virt.hardware [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.110447] env[62383]: DEBUG nova.virt.hardware [None 
req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 770.110649] env[62383]: DEBUG nova.virt.hardware [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 770.110932] env[62383]: DEBUG nova.virt.hardware [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 770.111152] env[62383]: DEBUG nova.virt.hardware [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 770.111332] env[62383]: DEBUG nova.virt.hardware [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 770.111501] env[62383]: DEBUG nova.virt.hardware [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 770.111680] env[62383]: DEBUG nova.virt.hardware [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 770.112603] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea7d8e5-ed9f-4f2b-8f6a-0efe966936b0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.121314] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c83aa27-fd54-40ac-9004-e2626547cf04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.309203] env[62383]: ERROR nova.scheduler.client.report [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [req-9484c252-3c6c-4eed-be01-fe351e264140] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 
'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9484c252-3c6c-4eed-be01-fe351e264140"}]} [ 770.329590] env[62383]: DEBUG nova.scheduler.client.report [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 770.338318] env[62383]: DEBUG nova.compute.manager [req-44490bb7-1586-448b-8146-1ba2ee5acc2c req-c2cb17a7-3838-4506-9753-2a17f06ed181 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Received event network-vif-plugged-d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 770.339578] env[62383]: DEBUG oslo_concurrency.lockutils [req-44490bb7-1586-448b-8146-1ba2ee5acc2c req-c2cb17a7-3838-4506-9753-2a17f06ed181 service nova] Acquiring lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.339578] env[62383]: DEBUG oslo_concurrency.lockutils [req-44490bb7-1586-448b-8146-1ba2ee5acc2c req-c2cb17a7-3838-4506-9753-2a17f06ed181 service nova] Lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.339578] env[62383]: DEBUG oslo_concurrency.lockutils [req-44490bb7-1586-448b-8146-1ba2ee5acc2c req-c2cb17a7-3838-4506-9753-2a17f06ed181 service nova] Lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.339578] env[62383]: DEBUG nova.compute.manager [req-44490bb7-1586-448b-8146-1ba2ee5acc2c req-c2cb17a7-3838-4506-9753-2a17f06ed181 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] No waiting events found dispatching network-vif-plugged-d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 770.339578] env[62383]: WARNING nova.compute.manager [req-44490bb7-1586-448b-8146-1ba2ee5acc2c req-c2cb17a7-3838-4506-9753-2a17f06ed181 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Received unexpected event network-vif-plugged-d2a0fa64-9d35-4dfd-8124-3099b780d7b7 for instance with vm_state building and task_state spawning. [ 770.353618] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451579, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.360781] env[62383]: DEBUG nova.scheduler.client.report [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 770.361170] env[62383]: DEBUG nova.compute.provider_tree [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 770.367218] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451580, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486946} completed successfully. 
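The ERROR above is placement's generation-based concurrency control at work: the inventory update carried a stale resource-provider generation, placement answered 409 placement.concurrent_update, and the report client refreshes its view of the provider (the subsequent "Refreshing inventories / aggregate associations / trait associations" lines) before a later retry. A minimal sketch of that compare-and-retry pattern, using hypothetical get_provider() and put_inventory() helpers rather than the real placement client API:

```python
def set_inventory_with_retry(get_provider, put_inventory, provider_uuid, inventory,
                             max_attempts=3):
    """Retry an inventory update when placement reports a generation conflict."""
    for _ in range(max_attempts):
        # Re-read the provider on each attempt so the request carries the
        # current generation.
        generation = get_provider(provider_uuid)['generation']
        status = put_inventory(provider_uuid, generation, inventory)
        if status == 200:
            return True
        if status != 409:
            raise RuntimeError('unexpected placement response: %s' % status)
        # 409 == placement.concurrent_update: another writer bumped the
        # generation first; loop, refresh, and try again.
    return False
```

In this log the conflicting writer is simply another request updating the same provider (60615f54-0557-436e-a486-87505bffb4c7), so the refreshed inventory a few records later differs only in the DISK_GB max_unit value.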
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.368026] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 93234e99-268f-491e-96bd-a77f4c9f164b/93234e99-268f-491e-96bd-a77f4c9f164b.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 770.368026] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 770.368026] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56f54d5e-4e00-405f-b573-8ca869b6b611 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.374523] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 770.374523] env[62383]: value = "task-2451581" [ 770.374523] env[62383]: _type = "Task" [ 770.374523] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.375589] env[62383]: DEBUG nova.scheduler.client.report [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 770.388461] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451581, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.409513] env[62383]: DEBUG nova.scheduler.client.report [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 770.498742] env[62383]: DEBUG nova.network.neutron [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Successfully updated port: d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.656953] env[62383]: DEBUG nova.network.neutron [req-9e901752-6b61-45a3-befd-3bb7e1169650 req-1056d856-7f8c-460d-a995-5484072e490e service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updated VIF entry in instance network info cache for port e527ebe3-bc3d-4e96-8325-891e543bdb39. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 770.657375] env[62383]: DEBUG nova.network.neutron [req-9e901752-6b61-45a3-befd-3bb7e1169650 req-1056d856-7f8c-460d-a995-5484072e490e service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updating instance_info_cache with network_info: [{"id": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "address": "fa:16:3e:ec:51:f4", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape527ebe3-bc", "ovs_interfaceid": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.848020] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451579, 'name': ReconfigVM_Task, 'duration_secs': 0.618478} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.848367] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Reconfigured VM instance instance-00000036 to attach disk [datastore2] e41f5c22-44e0-4de8-a4d0-865fe2c6febd/ephemeral_0.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.848703] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e6fd6dc-61a0-49a5-b106-c757d3bacc9c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.854924] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 770.854924] env[62383]: value = "task-2451582" [ 770.854924] env[62383]: _type = "Task" [ 770.854924] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.865844] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451582, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.886937] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451581, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070354} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.890315] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 770.891616] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb101a9-607b-4872-8d38-fa2c8631565c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.913075] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 93234e99-268f-491e-96bd-a77f4c9f164b/93234e99-268f-491e-96bd-a77f4c9f164b.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.916014] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79b0a8a7-3866-4170-ad93-b34db0007581 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.935117] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 770.935117] env[62383]: value = "task-2451583" [ 770.935117] env[62383]: _type = "Task" [ 770.935117] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.944250] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451583, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.952315] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7768f5-a86c-4f06-85df-fb4a6541f0bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.959454] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf35bd9-8a6f-47c7-878b-fc880ee8dd58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.990472] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6da5613-8601-4781-9184-7d9da2f705e0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.998092] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7f12f1-cbd5-431c-b110-2f586416d621 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.012148] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 771.012312] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquired lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.012463] env[62383]: DEBUG nova.network.neutron [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.013819] env[62383]: DEBUG nova.compute.provider_tree [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 771.160232] env[62383]: DEBUG oslo_concurrency.lockutils [req-9e901752-6b61-45a3-befd-3bb7e1169650 req-1056d856-7f8c-460d-a995-5484072e490e service nova] Releasing lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.364804] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 
tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451582, 'name': Rename_Task, 'duration_secs': 0.471929} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.365059] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.365305] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-464405fa-3e19-4d71-8ff2-3aaafeac5b1c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.370833] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 771.370833] env[62383]: value = "task-2451584" [ 771.370833] env[62383]: _type = "Task" [ 771.370833] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.378185] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451584, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.444359] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451583, 'name': ReconfigVM_Task, 'duration_secs': 0.40322} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.444618] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 93234e99-268f-491e-96bd-a77f4c9f164b/93234e99-268f-491e-96bd-a77f4c9f164b.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.445260] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6f563b9b-385a-4822-905e-d99d5e71a8c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.451901] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 771.451901] env[62383]: value = "task-2451585" [ 771.451901] env[62383]: _type = "Task" [ 771.451901] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.459210] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451585, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.550043] env[62383]: DEBUG nova.scheduler.client.report [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 82 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 771.550043] env[62383]: DEBUG nova.compute.provider_tree [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 82 to 83 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 771.550334] env[62383]: DEBUG nova.compute.provider_tree [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 771.555165] env[62383]: DEBUG nova.network.neutron [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.692367] env[62383]: DEBUG nova.network.neutron [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Updating instance_info_cache with network_info: [{"id": "d2a0fa64-9d35-4dfd-8124-3099b780d7b7", "address": "fa:16:3e:51:f7:25", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a0fa64-9d", "ovs_interfaceid": "d2a0fa64-9d35-4dfd-8124-3099b780d7b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.881047] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451584, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.960938] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451585, 'name': Rename_Task, 'duration_secs': 0.134866} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.961278] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.961534] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2edc5f50-3426-46f9-bc68-1faf97ca100d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.970639] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 771.970639] env[62383]: value = "task-2451586" [ 771.970639] env[62383]: _type = "Task" [ 771.970639] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.978616] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451586, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.059062] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.007s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.059646] env[62383]: DEBUG nova.compute.manager [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 772.062328] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.609s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.063709] env[62383]: INFO nova.compute.claims [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.194855] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Releasing lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.195324] env[62383]: DEBUG nova.compute.manager [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Instance network_info: |[{"id": "d2a0fa64-9d35-4dfd-8124-3099b780d7b7", "address": "fa:16:3e:51:f7:25", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": 
"nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a0fa64-9d", "ovs_interfaceid": "d2a0fa64-9d35-4dfd-8124-3099b780d7b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 772.195829] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:f7:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2a0fa64-9d35-4dfd-8124-3099b780d7b7', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.203546] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Creating folder: Project (28e37dc42ac74824b43bd4b120a52674). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 772.203860] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce5f9a69-543d-468b-ab4b-bd0b114c30fb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.215283] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Created folder: Project (28e37dc42ac74824b43bd4b120a52674) in parent group-v496304. [ 772.215527] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Creating folder: Instances. Parent ref: group-v496470. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 772.215803] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13228307-0cb5-4aed-94be-3b0710ec7e99 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.226018] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Created folder: Instances in parent group-v496470. [ 772.226298] env[62383]: DEBUG oslo.service.loopingcall [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 772.226499] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 772.226745] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2d8ed25-35e2-48c6-875b-fd2c6359522f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.249361] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.249361] env[62383]: value = "task-2451589" [ 772.249361] env[62383]: _type = "Task" [ 772.249361] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.257854] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451589, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.365664] env[62383]: DEBUG nova.compute.manager [req-1b910196-9cda-45f8-ab39-6e5030c1ae7e req-51766aaa-9641-4026-8153-a096513960b2 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Received event network-changed-d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 772.365890] env[62383]: DEBUG nova.compute.manager [req-1b910196-9cda-45f8-ab39-6e5030c1ae7e req-51766aaa-9641-4026-8153-a096513960b2 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Refreshing instance network info cache due to event network-changed-d2a0fa64-9d35-4dfd-8124-3099b780d7b7. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 772.366158] env[62383]: DEBUG oslo_concurrency.lockutils [req-1b910196-9cda-45f8-ab39-6e5030c1ae7e req-51766aaa-9641-4026-8153-a096513960b2 service nova] Acquiring lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.366307] env[62383]: DEBUG oslo_concurrency.lockutils [req-1b910196-9cda-45f8-ab39-6e5030c1ae7e req-51766aaa-9641-4026-8153-a096513960b2 service nova] Acquired lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.366467] env[62383]: DEBUG nova.network.neutron [req-1b910196-9cda-45f8-ab39-6e5030c1ae7e req-51766aaa-9641-4026-8153-a096513960b2 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Refreshing network info cache for port d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 772.381466] env[62383]: DEBUG oslo_vmware.api [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451584, 'name': PowerOnVM_Task, 'duration_secs': 0.984058} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.381708] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 772.381904] env[62383]: INFO nova.compute.manager [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Took 9.28 seconds to spawn the instance on the hypervisor. [ 772.382095] env[62383]: DEBUG nova.compute.manager [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.382846] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093b5673-c25b-4ca2-81e6-8d6530f26596 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.479737] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451586, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.568237] env[62383]: DEBUG nova.compute.utils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 772.573313] env[62383]: DEBUG nova.compute.manager [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 772.573554] env[62383]: DEBUG nova.network.neutron [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 772.615643] env[62383]: DEBUG nova.policy [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b240965406114137914b03ac96806ea2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c04aced555934225bc58a044bfb4bc35', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 772.759334] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451589, 'name': CreateVM_Task, 'duration_secs': 0.342267} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.759474] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 772.760448] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.760448] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.760652] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 772.760898] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee4e6d3d-ea9a-43b4-99c7-1157b0eac88b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.765383] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 772.765383] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d549d9-0262-9e35-3231-acde2139af25" [ 772.765383] 
env[62383]: _type = "Task" [ 772.765383] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.772702] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d549d9-0262-9e35-3231-acde2139af25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.898191] env[62383]: DEBUG nova.network.neutron [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Successfully created port: cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.909581] env[62383]: INFO nova.compute.manager [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Took 54.33 seconds to build instance. [ 772.983206] env[62383]: DEBUG oslo_vmware.api [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451586, 'name': PowerOnVM_Task, 'duration_secs': 0.6354} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.983544] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 772.983798] env[62383]: INFO nova.compute.manager [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Took 7.69 seconds to spawn the instance on the hypervisor. [ 772.984030] env[62383]: DEBUG nova.compute.manager [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.984899] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3683c124-a65c-450d-b67b-142e2fa805e1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.074596] env[62383]: DEBUG nova.compute.manager [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 773.185276] env[62383]: DEBUG nova.network.neutron [req-1b910196-9cda-45f8-ab39-6e5030c1ae7e req-51766aaa-9641-4026-8153-a096513960b2 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Updated VIF entry in instance network info cache for port d2a0fa64-9d35-4dfd-8124-3099b780d7b7. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 773.185689] env[62383]: DEBUG nova.network.neutron [req-1b910196-9cda-45f8-ab39-6e5030c1ae7e req-51766aaa-9641-4026-8153-a096513960b2 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Updating instance_info_cache with network_info: [{"id": "d2a0fa64-9d35-4dfd-8124-3099b780d7b7", "address": "fa:16:3e:51:f7:25", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a0fa64-9d", "ovs_interfaceid": "d2a0fa64-9d35-4dfd-8124-3099b780d7b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.276725] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d549d9-0262-9e35-3231-acde2139af25, 'name': SearchDatastore_Task, 'duration_secs': 0.011207} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.276999] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.277298] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 773.277507] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.277619] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.277802] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 773.278084] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d0401e2-26ec-494a-bfcf-cf49783bf208 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.286283] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 773.287028] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 773.287222] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22447c7e-f813-4cde-bea0-be36b07d1805 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.298190] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 773.298190] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b95c46-3ace-259e-cc80-e896303de4f7" [ 773.298190] env[62383]: _type = "Task" [ 773.298190] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.306308] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b95c46-3ace-259e-cc80-e896303de4f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.411873] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c5cb5fe7-3dd6-496e-bc37-2858d22e6b23 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.722s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.510617] env[62383]: INFO nova.compute.manager [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Took 50.98 seconds to build instance. 
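A note on the lockutils lines that bracket this point ("acquired ... waited 38.609s", "released ... held 4.007s / 79.201s / 80.722s"): they all come from one instrumentation pattern — measure how long a caller waited to obtain a named lock, then how long it held the lock before releasing it. The sketch below reproduces that pattern with a plain threading.Lock purely as an illustration; it is not the oslo.concurrency implementation (fair locks, external file locks, and semaphore garbage collection are omitted), and the timed_lock/_get_named_lock helpers are hypothetical names for this sketch only.

    import contextlib
    import threading
    import time

    _locks = {}
    _registry_guard = threading.Lock()

    def _get_named_lock(name):
        # One shared Lock object per lock name, created lazily.
        with _registry_guard:
            return _locks.setdefault(name, threading.Lock())

    @contextlib.contextmanager
    def timed_lock(name, owner, log=print):
        """Acquire the named lock, logging wait and hold times like the entries above."""
        lock = _get_named_lock(name)
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        log('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_from
            log('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))

    # Usage sketch: serialize work under a "compute_resources"-style lock, the way
    # the resource tracker serializes instance_claim in the surrounding log.
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.1)  # stand-in for the claim work

A long "waited" value with a short "held" value (as in the 38.609s wait above) indicates contention on the named lock rather than slowness in the protected section itself.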
[ 773.652016] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a74f75-00a8-433f-8c8c-8557aee79d67 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.660157] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580db963-d5be-4859-bc33-7281c47a65ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.689607] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696fcf7e-50a6-4af9-9ec7-f394e361a996 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.692240] env[62383]: DEBUG oslo_concurrency.lockutils [req-1b910196-9cda-45f8-ab39-6e5030c1ae7e req-51766aaa-9641-4026-8153-a096513960b2 service nova] Releasing lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.697301] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d62524-22da-4b20-9f70-793660aca4cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.711301] env[62383]: DEBUG nova.compute.provider_tree [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.807914] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b95c46-3ace-259e-cc80-e896303de4f7, 'name': SearchDatastore_Task, 'duration_secs': 0.01032} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.808738] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1e8c349-1726-4217-a205-f4186c88b11b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.814269] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 773.814269] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525f5a23-20e4-5443-7fe5-57d57784acc9" [ 773.814269] env[62383]: _type = "Task" [ 773.814269] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.822109] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525f5a23-20e4-5443-7fe5-57d57784acc9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.012007] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed298c19-c7bb-4e19-8a7e-3e78bd9354b2 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "93234e99-268f-491e-96bd-a77f4c9f164b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.201s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.043612] env[62383]: DEBUG nova.compute.manager [req-617b2c8c-a466-473d-8fff-bfd704c0e5ac req-a4861229-1c28-46af-b116-5e8766d12180 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Received event network-changed-66ff13c1-430d-414e-a83a-3d9e7536688a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 774.043664] env[62383]: DEBUG nova.compute.manager [req-617b2c8c-a466-473d-8fff-bfd704c0e5ac req-a4861229-1c28-46af-b116-5e8766d12180 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Refreshing instance network info cache due to event network-changed-66ff13c1-430d-414e-a83a-3d9e7536688a. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 774.043851] env[62383]: DEBUG oslo_concurrency.lockutils [req-617b2c8c-a466-473d-8fff-bfd704c0e5ac req-a4861229-1c28-46af-b116-5e8766d12180 service nova] Acquiring lock "refresh_cache-e41f5c22-44e0-4de8-a4d0-865fe2c6febd" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.043995] env[62383]: DEBUG oslo_concurrency.lockutils [req-617b2c8c-a466-473d-8fff-bfd704c0e5ac req-a4861229-1c28-46af-b116-5e8766d12180 service nova] Acquired lock "refresh_cache-e41f5c22-44e0-4de8-a4d0-865fe2c6febd" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.044287] env[62383]: DEBUG nova.network.neutron [req-617b2c8c-a466-473d-8fff-bfd704c0e5ac req-a4861229-1c28-46af-b116-5e8766d12180 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Refreshing network info cache for port 66ff13c1-430d-414e-a83a-3d9e7536688a {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.098014] env[62383]: DEBUG nova.compute.manager [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 774.132033] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 774.132656] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 774.132656] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 774.132656] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 774.132854] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 774.133082] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 774.133156] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 774.133299] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 774.133468] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 774.133635] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 774.133821] env[62383]: DEBUG nova.virt.hardware [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 774.135027] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bf1d92-fd70-4f33-8c9c-5de5a81058e7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.143039] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d434e2-80ac-44ea-a55d-b76bcc96a2ef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.214034] env[62383]: DEBUG nova.scheduler.client.report [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.328057] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525f5a23-20e4-5443-7fe5-57d57784acc9, 'name': SearchDatastore_Task, 'duration_secs': 0.032155} completed successfully. 
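Editor's note: the nova.virt.hardware walk above is topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536 sockets/cores/threads, and the only topology that covers one vCPU is 1x1x1. A rough, simplified re-implementation of that enumeration (the real _get_possible_cpu_topologies also applies preferred orderings and NUMA constraints):

    from collections import namedtuple
    from itertools import product

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate (sockets, cores, threads) combinations whose product exactly
        # covers the requested vCPU count, within the flavor/image maxima.
        found = []
        for s, c, t in product(range(1, min(max_sockets, vcpus) + 1),
                               range(1, min(max_cores, vcpus) + 1),
                               range(1, min(max_threads, vcpus) + 1)):
            if s * c * t == vcpus:
                found.append(VirtCPUTopology(s, c, t))
        return found

    print(possible_topologies(1))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], as in the log above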
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.328821] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.328821] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 5ef22e87-f73c-47ba-b925-2bd2effe74eb/5ef22e87-f73c-47ba-b925-2bd2effe74eb.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 774.328985] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5d043a4-f705-4015-a21d-60f32a53e889 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.335778] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 774.335778] env[62383]: value = "task-2451590" [ 774.335778] env[62383]: _type = "Task" [ 774.335778] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.346451] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451590, 'name': CopyVirtualDisk_Task} progress is 0%. 
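Editor's note: the CopyVirtualDisk_Task block above is the usual oslo.vmware shape: submit a vCenter task, then poll it ("progress is 0%" ... "completed successfully") until it reaches a terminal state. A bare-bones version of that poll loop, assuming a hypothetical get_task_info() callable in place of the real PropertyCollector query; the library's own wait_for_task additionally maps vCenter faults to typed exceptions:

    import time

    def wait_for_task(get_task_info, interval=0.5, timeout=300):
        # Poll a vCenter-style task handle until it reports success or error,
        # logging progress the way the oslo.vmware messages above do.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()          # e.g. {'state': 'running', 'progress': 40}
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError('task failed: %s' % info.get('error'))
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(interval)
        raise TimeoutError('task did not complete within %ss' % timeout)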
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.604186] env[62383]: DEBUG nova.compute.manager [req-c44ee9fc-9099-414f-a27f-3b02f7277f1d req-4081d91a-666d-4e90-a607-dcd61d95811b service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Received event network-vif-plugged-cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 774.604186] env[62383]: DEBUG oslo_concurrency.lockutils [req-c44ee9fc-9099-414f-a27f-3b02f7277f1d req-4081d91a-666d-4e90-a607-dcd61d95811b service nova] Acquiring lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.604186] env[62383]: DEBUG oslo_concurrency.lockutils [req-c44ee9fc-9099-414f-a27f-3b02f7277f1d req-4081d91a-666d-4e90-a607-dcd61d95811b service nova] Lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.604186] env[62383]: DEBUG oslo_concurrency.lockutils [req-c44ee9fc-9099-414f-a27f-3b02f7277f1d req-4081d91a-666d-4e90-a607-dcd61d95811b service nova] Lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.604186] env[62383]: DEBUG nova.compute.manager [req-c44ee9fc-9099-414f-a27f-3b02f7277f1d req-4081d91a-666d-4e90-a607-dcd61d95811b service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] No waiting events found dispatching network-vif-plugged-cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 774.605049] env[62383]: WARNING nova.compute.manager [req-c44ee9fc-9099-414f-a27f-3b02f7277f1d req-4081d91a-666d-4e90-a607-dcd61d95811b service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Received unexpected event network-vif-plugged-cc7bb81b-ee7e-4bd2-8c93-c133276ee413 for instance with vm_state building and task_state spawning. [ 774.721118] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.721118] env[62383]: DEBUG nova.compute.manager [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Start building networks asynchronously for instance. 
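Editor's note: the "-events" lock and pop_instance_event messages show the rendezvous between Neutron callbacks and a spawning instance: the driver may register a waiter for network-vif-plugged, and the external-event handler pops and signals it; here no waiter was registered yet, so the event is logged as unexpected and dropped. A stripped-down sketch of that mechanism using threading primitives rather than Nova's eventlet-based implementation (class and method names echo the log, they are not copied from Nova):

    import threading

    class InstanceEvents:
        # Waiters keyed by (instance_uuid, event_name), e.g.
        # ('bc1e1f0c-...', 'network-vif-plugged-cc7bb81b-...').
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}

        def prepare_for_event(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev                       # the spawn path blocks on ev.wait(timeout)

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

        def dispatch(self, instance_uuid, event_name):
            waiter = self.pop_instance_event(instance_uuid, event_name)
            if waiter is None:
                # No one was waiting: mirrors the WARNING "Received unexpected event".
                print('Received unexpected event %s' % event_name)
            else:
                waiter.set()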
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 774.723221] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 39.406s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 774.785343] env[62383]: DEBUG nova.network.neutron [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Successfully updated port: cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 774.848994] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451590, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.903350] env[62383]: DEBUG nova.network.neutron [req-617b2c8c-a466-473d-8fff-bfd704c0e5ac req-a4861229-1c28-46af-b116-5e8766d12180 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Updated VIF entry in instance network info cache for port 66ff13c1-430d-414e-a83a-3d9e7536688a. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 774.903797] env[62383]: DEBUG nova.network.neutron [req-617b2c8c-a466-473d-8fff-bfd704c0e5ac req-a4861229-1c28-46af-b116-5e8766d12180 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Updating instance_info_cache with network_info: [{"id": "66ff13c1-430d-414e-a83a-3d9e7536688a", "address": "fa:16:3e:b3:22:55", "network": {"id": "15922417-5941-4372-b068-7f6b0a8c7335", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1040646301-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.162", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75346deaf9ad40fa925d4aff9fdff2cc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66ff13c1-43", "ovs_interfaceid": "66ff13c1-430d-414e-a83a-3d9e7536688a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.228358] env[62383]: INFO nova.compute.claims [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.232973] env[62383]: DEBUG nova.compute.utils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 775.234404] env[62383]: DEBUG nova.compute.manager [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 775.234578] env[62383]: DEBUG nova.network.neutron [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.273289] env[62383]: DEBUG nova.policy [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02daed55abc149a2a3f4502400b674eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a657c912fef04c5ca8c0b5d96a8a3064', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 775.288391] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.288644] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquired lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.288869] env[62383]: DEBUG nova.network.neutron [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 775.348796] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451590, 'name': CopyVirtualDisk_Task} progress is 100%. 
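Editor's note: the instance_info_cache entry refreshed above for port 66ff13c1-... is a plain nested structure: a list of VIFs, each with a network, subnets, fixed IPs and optional floating IPs. Extracting addresses from it is just dictionary walking; a small helper along these lines (hypothetical, not a Nova API) recovers the fixed/floating pair 192.168.128.10 / 10.180.180.162 seen in that entry:

    def extract_addresses(network_info):
        # network_info is the cached VIF list shown in the log:
        # [{"id": ..., "network": {"subnets": [{"ips": [...]}]}, ...}, ...]
        pairs = []
        for vif in network_info:
            for subnet in vif.get('network', {}).get('subnets', []):
                for ip in subnet.get('ips', []):
                    floats = [f['address'] for f in ip.get('floating_ips', [])]
                    pairs.append((vif['id'], ip['address'], floats))
        return pairs

    # Applied to the cache entry above this yields:
    # [('66ff13c1-430d-414e-a83a-3d9e7536688a', '192.168.128.10', ['10.180.180.162'])]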
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.406763] env[62383]: DEBUG oslo_concurrency.lockutils [req-617b2c8c-a466-473d-8fff-bfd704c0e5ac req-a4861229-1c28-46af-b116-5e8766d12180 service nova] Releasing lock "refresh_cache-e41f5c22-44e0-4de8-a4d0-865fe2c6febd" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.530571] env[62383]: DEBUG nova.compute.manager [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Stashing vm_state: active {{(pid=62383) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 775.573873] env[62383]: DEBUG nova.network.neutron [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Successfully created port: a6625886-1cc1-4c4d-bd08-6b5221d4a2c2 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.736176] env[62383]: INFO nova.compute.resource_tracker [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating resource usage from migration e6a5a341-cb15-4a73-bdeb-a5f56a64c08a [ 775.739973] env[62383]: DEBUG nova.compute.manager [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 775.827588] env[62383]: DEBUG nova.network.neutron [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.849775] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451590, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.493289} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.853199] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 5ef22e87-f73c-47ba-b925-2bd2effe74eb/5ef22e87-f73c-47ba-b925-2bd2effe74eb.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 775.853647] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 775.856170] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06b2764f-dc8d-4ee0-a4a6-9be40c9c365f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.863419] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 775.863419] env[62383]: value = "task-2451591" [ 775.863419] env[62383]: _type = "Task" [ 775.863419] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.872361] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451591, 'name': ExtendVirtualDisk_Task} progress is 0%. 
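Editor's note: copy-then-extend is the disk step of the VMware spawn traced above: the cached image vmdk is copied into the instance directory, then the root disk is grown to the flavor's root_gb (1048576 KB here, i.e. the 1 GiB of m1.nano). A hedged outline of that flow; copy_virtual_disk and extend_virtual_disk are print-only stand-ins for the vCenter task submissions, not the real vm_util signatures:

    def copy_virtual_disk(src, dst):
        # Stand-in for the CopyVirtualDisk_Task submission seen above.
        print('copy %s -> %s' % (src, dst))

    def extend_virtual_disk(path, size_kb):
        # Stand-in for ExtendVirtualDisk_Task; the size is expressed in KB.
        print('extend %s to %d KB' % (path, size_kb))

    def prepare_root_disk(datastore, image_id, instance_uuid, root_gb):
        cache = '[%s] devstack-image-cache_base/%s/%s.vmdk' % (datastore, image_id, image_id)
        root = '[%s] %s/%s.vmdk' % (datastore, instance_uuid, instance_uuid)
        copy_virtual_disk(cache, root)                      # copy from the image cache
        extend_virtual_disk(root, root_gb * 1024 * 1024)    # grow to flavor root_gb
        return root

    prepare_root_disk('datastore1', 'cac3b430-a1d5-4ad1-92ec-34c2261779a8',
                      '5ef22e87-f73c-47ba-b925-2bd2effe74eb', root_gb=1)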
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.034829] env[62383]: DEBUG nova.network.neutron [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Updating instance_info_cache with network_info: [{"id": "cc7bb81b-ee7e-4bd2-8c93-c133276ee413", "address": "fa:16:3e:8c:8c:21", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc7bb81b-ee", "ovs_interfaceid": "cc7bb81b-ee7e-4bd2-8c93-c133276ee413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.048120] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.285919] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76466bd8-ff58-4d47-84c7-5512c35d4c2c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.295510] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0675a1-249e-48ff-8629-df5933bbc6cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.325927] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30e3f8e-9247-4b6a-9c0f-a19dbc62206e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.333233] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd8dfbd-e578-4844-ad0b-3d6364a9d3a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.346187] env[62383]: DEBUG nova.compute.provider_tree [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.373153] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451591, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073382} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.373430] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 776.374184] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb89288-37ad-4faa-80da-b56dff1a273e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.395785] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 5ef22e87-f73c-47ba-b925-2bd2effe74eb/5ef22e87-f73c-47ba-b925-2bd2effe74eb.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 776.396357] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fca0fda3-24a6-47b0-8068-304839652928 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.418772] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 776.418772] env[62383]: value = "task-2451592" [ 776.418772] env[62383]: _type = "Task" [ 776.418772] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.426470] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451592, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.540571] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Releasing lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.540898] env[62383]: DEBUG nova.compute.manager [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Instance network_info: |[{"id": "cc7bb81b-ee7e-4bd2-8c93-c133276ee413", "address": "fa:16:3e:8c:8c:21", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc7bb81b-ee", "ovs_interfaceid": "cc7bb81b-ee7e-4bd2-8c93-c133276ee413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 776.541354] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:8c:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f41e4aa-0d23-48c4-a359-574abb2e7b9a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc7bb81b-ee7e-4bd2-8c93-c133276ee413', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 776.548853] env[62383]: DEBUG oslo.service.loopingcall [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 776.549094] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 776.549462] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-584d9e9e-5ca2-4e0e-be9a-7f41a8d902b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.571286] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 776.571286] env[62383]: value = "task-2451593" [ 776.571286] env[62383]: _type = "Task" [ 776.571286] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.578837] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451593, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.627948] env[62383]: DEBUG nova.compute.manager [req-bd396722-8fe9-486a-8a0a-f30247fff5d6 req-e6d8a23b-508f-4fc0-9c9f-5b66d0d39aa0 service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Received event network-changed-cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 776.628223] env[62383]: DEBUG nova.compute.manager [req-bd396722-8fe9-486a-8a0a-f30247fff5d6 req-e6d8a23b-508f-4fc0-9c9f-5b66d0d39aa0 service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Refreshing instance network info cache due to event network-changed-cc7bb81b-ee7e-4bd2-8c93-c133276ee413. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 776.628346] env[62383]: DEBUG oslo_concurrency.lockutils [req-bd396722-8fe9-486a-8a0a-f30247fff5d6 req-e6d8a23b-508f-4fc0-9c9f-5b66d0d39aa0 service nova] Acquiring lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.628454] env[62383]: DEBUG oslo_concurrency.lockutils [req-bd396722-8fe9-486a-8a0a-f30247fff5d6 req-e6d8a23b-508f-4fc0-9c9f-5b66d0d39aa0 service nova] Acquired lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.628621] env[62383]: DEBUG nova.network.neutron [req-bd396722-8fe9-486a-8a0a-f30247fff5d6 req-e6d8a23b-508f-4fc0-9c9f-5b66d0d39aa0 service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Refreshing network info cache for port cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.749531] env[62383]: DEBUG nova.compute.manager [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Start spawning the instance on the hypervisor. 
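Editor's note: the "Instance VIF info" line above is the bridge between Neutron's cached network_info and the vSphere VM spec: each VIF is reduced to the integration bridge name, MAC, an NSX logical-switch reference and the vif_model before CreateVM_Task is submitted. A small sketch of that mapping, relying on the nsx-logical-switch-id detail shown in the cache entry (illustrative only, not vmops.build_virtual_machine):

    def vif_infos(network_info, vif_model='vmxnet3'):
        # Reduce cached Neutron VIFs to the fields the VM spec needs,
        # mirroring the "Instance VIF info" entry in the log above.
        infos = []
        for vif in network_info:
            details = vif.get('details', {})
            infos.append({
                'network_name': vif['network']['bridge'],          # e.g. 'br-int'
                'mac_address': vif['address'],                      # e.g. 'fa:16:3e:8c:8c:21'
                'network_ref': {
                    'type': 'OpaqueNetwork',
                    'network-id': details.get('nsx-logical-switch-id'),
                    'network-type': 'nsx.LogicalSwitch',
                    'use-external-id': True,
                },
                'iface_id': vif['id'],
                'vif_model': vif_model,
            })
        return infos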
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 776.777454] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 776.777714] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.777873] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 776.778069] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.778250] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 776.778400] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 776.778607] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 776.778812] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 776.778993] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f 
tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 776.779176] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 776.779364] env[62383]: DEBUG nova.virt.hardware [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 776.780348] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b882dfb-1839-445c-b6f3-1555998396f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.788828] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08dec178-ba2a-4719-b60a-101a5943e4f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.849751] env[62383]: DEBUG nova.scheduler.client.report [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 776.931891] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451592, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.084224] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451593, 'name': CreateVM_Task, 'duration_secs': 0.379187} completed successfully. 
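Editor's note: the scheduler report-client lines compare the locally computed inventory (VCPU/MEMORY_MB/DISK_GB with their allocation ratios and max_unit limits) against what the provider tree already holds, and only push an update to Placement when something differs; "Inventory has not changed" means the update is skipped. A hedged sketch of that diff-before-update idea (set_inventory is a hypothetical callback standing in for the Placement API call):

    def sync_inventory(provider_tree, provider_uuid, new_inventory, set_inventory):
        # provider_tree maps provider UUID -> current inventory dict, e.g.
        # {'VCPU': {'total': 48, 'allocation_ratio': 4.0, ...}, 'MEMORY_MB': {...}, ...}
        current = provider_tree.get(provider_uuid)
        if current == new_inventory:
            print('Inventory has not changed for provider %s' % provider_uuid)
            return False
        set_inventory(provider_uuid, new_inventory)   # call Placement only on change
        provider_tree[provider_uuid] = new_inventory
        return True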
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.084224] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 777.084843] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.084999] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.085941] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 777.085941] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6a91868-5c3d-4039-b2d5-581b18e43f7f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.090615] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 777.090615] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ea6ce7-13f5-00a7-7f3c-94bff8572800" [ 777.090615] env[62383]: _type = "Task" [ 777.090615] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.099135] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ea6ce7-13f5-00a7-7f3c-94bff8572800, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.190113] env[62383]: DEBUG nova.network.neutron [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Successfully updated port: a6625886-1cc1-4c4d-bd08-6b5221d4a2c2 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 777.351419] env[62383]: DEBUG nova.network.neutron [req-bd396722-8fe9-486a-8a0a-f30247fff5d6 req-e6d8a23b-508f-4fc0-9c9f-5b66d0d39aa0 service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Updated VIF entry in instance network info cache for port cc7bb81b-ee7e-4bd2-8c93-c133276ee413. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.351952] env[62383]: DEBUG nova.network.neutron [req-bd396722-8fe9-486a-8a0a-f30247fff5d6 req-e6d8a23b-508f-4fc0-9c9f-5b66d0d39aa0 service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Updating instance_info_cache with network_info: [{"id": "cc7bb81b-ee7e-4bd2-8c93-c133276ee413", "address": "fa:16:3e:8c:8c:21", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc7bb81b-ee", "ovs_interfaceid": "cc7bb81b-ee7e-4bd2-8c93-c133276ee413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.359011] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.636s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.359198] env[62383]: INFO nova.compute.manager [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Migrating [ 777.379021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.487s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.379021] env[62383]: DEBUG nova.objects.instance [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lazy-loading 'resources' on Instance uuid f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.429873] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451592, 'name': ReconfigVM_Task, 'duration_secs': 0.562524} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.430163] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 5ef22e87-f73c-47ba-b925-2bd2effe74eb/5ef22e87-f73c-47ba-b925-2bd2effe74eb.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 777.430765] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4d129e3-0693-4ad1-9bf4-49e2720f26e1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.437579] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 777.437579] env[62383]: value = "task-2451594" [ 777.437579] env[62383]: _type = "Task" [ 777.437579] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.445286] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451594, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.600853] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ea6ce7-13f5-00a7-7f3c-94bff8572800, 'name': SearchDatastore_Task, 'duration_secs': 0.011335} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.601283] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.601599] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 777.601858] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.602016] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.602202] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 777.602574] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-133ebd0c-0478-4f8d-b50a-49152d104f11 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.610385] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 777.610563] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Folder [datastore1] devstack-image-cache_base created. 
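Editor's note: the _fetch_image_if_missing sequence above is a check-then-create flow guarded by a per-image datastore lock: take the lock on the cached vmdk path, make sure the cache folder exists, and search the datastore for the vmdk before deciding whether a download is needed. A condensed sketch under those assumptions; mkdir_if_missing, vmdk_exists and fetch_image are hypothetical stand-ins for the ds_util/vmops helpers:

    from oslo_concurrency import lockutils

    def ensure_cached_image(image_id, cache_dir, mkdir_if_missing, vmdk_exists, fetch_image):
        vmdk = '%s/%s/%s.vmdk' % (cache_dir, image_id, image_id)
        # One download per image at a time, like the
        # "[datastore1] devstack-image-cache_base/<image>" locks above.
        with lockutils.lock(vmdk):
            mkdir_if_missing(cache_dir)        # FileManager.MakeDirectory in the log
            if not vmdk_exists(vmdk):          # HostDatastoreBrowser.SearchDatastore_Task
                fetch_image(image_id, vmdk)    # only download on a cache miss
        return vmdk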
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 777.611271] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfc94a06-45b1-4917-b26c-fe5956ae31a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.616022] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 777.616022] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e5b3dc-5d2e-3735-acd2-0e9713604e0c" [ 777.616022] env[62383]: _type = "Task" [ 777.616022] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.623349] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e5b3dc-5d2e-3735-acd2-0e9713604e0c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.691103] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.691329] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.691538] env[62383]: DEBUG nova.network.neutron [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.858236] env[62383]: DEBUG oslo_concurrency.lockutils [req-bd396722-8fe9-486a-8a0a-f30247fff5d6 req-e6d8a23b-508f-4fc0-9c9f-5b66d0d39aa0 service nova] Releasing lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.882411] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.882649] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
777.882867] env[62383]: DEBUG nova.network.neutron [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.948012] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451594, 'name': Rename_Task, 'duration_secs': 0.378615} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.948012] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 777.948241] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2762d83a-af16-4b40-9e3e-55379e949e1a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.953989] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 777.953989] env[62383]: value = "task-2451595" [ 777.953989] env[62383]: _type = "Task" [ 777.953989] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.963646] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451595, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.126210] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e5b3dc-5d2e-3735-acd2-0e9713604e0c, 'name': SearchDatastore_Task, 'duration_secs': 0.01096} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.130648] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74de0bf8-30a5-4fba-bb24-668c6a4bf775 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.136842] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 778.136842] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ff3870-6af8-d235-00c3-8fc9ec9fa490" [ 778.136842] env[62383]: _type = "Task" [ 778.136842] env[62383]: } to complete. 
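Editor's note: taken together, the tasks traced for instance 5ef22e87-... form the tail of the spawn: reconfigure the VM to attach the copied root vmdk, rename it, then power it on, each submitted as a vCenter task and polled to completion. A compact outline of that ordering with a print-only stand-in for the task submissions (not the real vmops calls):

    def submit_task(name):
        # Stand-in for submitting and polling a vCenter task
        # (ReconfigVM_Task, Rename_Task, PowerOnVM_Task in the log above).
        print('%s ... completed successfully' % name)

    def finish_spawn(instance_uuid, vmdk_path):
        submit_task('ReconfigVM_Task: attach %s' % vmdk_path)   # attach the root disk
        submit_task('Rename_Task: %s' % instance_uuid)          # give the VM its final name
        submit_task('PowerOnVM_Task: %s' % instance_uuid)       # boot the instance

    finish_spawn('5ef22e87-f73c-47ba-b925-2bd2effe74eb',
                 '[datastore1] 5ef22e87-f73c-47ba-b925-2bd2effe74eb/'
                 '5ef22e87-f73c-47ba-b925-2bd2effe74eb.vmdk')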
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.145903] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ff3870-6af8-d235-00c3-8fc9ec9fa490, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.230213] env[62383]: DEBUG nova.network.neutron [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.351352] env[62383]: DEBUG nova.network.neutron [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Updating instance_info_cache with network_info: [{"id": "a6625886-1cc1-4c4d-bd08-6b5221d4a2c2", "address": "fa:16:3e:2b:e9:9a", "network": {"id": "c1ceac9a-f4dd-41e3-9156-9fca4c3727b3", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1636786674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a657c912fef04c5ca8c0b5d96a8a3064", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6625886-1c", "ovs_interfaceid": "a6625886-1cc1-4c4d-bd08-6b5221d4a2c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.353816] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744ff884-977d-4dcc-931e-2f2e11f5ca28 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.361975] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7525a475-20f4-400a-9bbf-46f9ad86158b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.395967] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d437714c-2f2d-4208-a1f8-c2f216dc5465 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.404045] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e48eb39-8c0c-49ce-80a4-9e415c76c01a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
778.420094] env[62383]: DEBUG nova.compute.provider_tree [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.466535] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451595, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.604428] env[62383]: DEBUG nova.network.neutron [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance_info_cache with network_info: [{"id": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "address": "fa:16:3e:9b:eb:a7", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap796c3e3e-48", "ovs_interfaceid": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.648175] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ff3870-6af8-d235-00c3-8fc9ec9fa490, 'name': SearchDatastore_Task, 'duration_secs': 0.010713} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.648175] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.648175] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] bc1e1f0c-a86d-4d31-a8c4-45d362e9b807/bc1e1f0c-a86d-4d31-a8c4-45d362e9b807.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 778.648175] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aadf937d-918f-4527-8672-1a7a568640d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.652397] env[62383]: DEBUG nova.compute.manager [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Received event network-vif-plugged-a6625886-1cc1-4c4d-bd08-6b5221d4a2c2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 778.652397] env[62383]: DEBUG oslo_concurrency.lockutils [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] Acquiring lock "a27fcace-4fb3-48fb-946d-b8057f6ee601-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.652397] env[62383]: DEBUG oslo_concurrency.lockutils [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] Lock "a27fcace-4fb3-48fb-946d-b8057f6ee601-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.652520] env[62383]: DEBUG oslo_concurrency.lockutils [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] Lock "a27fcace-4fb3-48fb-946d-b8057f6ee601-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.652632] env[62383]: DEBUG nova.compute.manager [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] No waiting events found dispatching network-vif-plugged-a6625886-1cc1-4c4d-bd08-6b5221d4a2c2 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 778.652796] env[62383]: WARNING nova.compute.manager [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Received unexpected event 
network-vif-plugged-a6625886-1cc1-4c4d-bd08-6b5221d4a2c2 for instance with vm_state building and task_state spawning. [ 778.652957] env[62383]: DEBUG nova.compute.manager [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Received event network-changed-a6625886-1cc1-4c4d-bd08-6b5221d4a2c2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 778.653128] env[62383]: DEBUG nova.compute.manager [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Refreshing instance network info cache due to event network-changed-a6625886-1cc1-4c4d-bd08-6b5221d4a2c2. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 778.653293] env[62383]: DEBUG oslo_concurrency.lockutils [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] Acquiring lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.658545] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 778.658545] env[62383]: value = "task-2451596" [ 778.658545] env[62383]: _type = "Task" [ 778.658545] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.666265] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451596, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.858237] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.858742] env[62383]: DEBUG nova.compute.manager [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Instance network_info: |[{"id": "a6625886-1cc1-4c4d-bd08-6b5221d4a2c2", "address": "fa:16:3e:2b:e9:9a", "network": {"id": "c1ceac9a-f4dd-41e3-9156-9fca4c3727b3", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1636786674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a657c912fef04c5ca8c0b5d96a8a3064", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6625886-1c", "ovs_interfaceid": "a6625886-1cc1-4c4d-bd08-6b5221d4a2c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 778.859338] env[62383]: DEBUG oslo_concurrency.lockutils [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] Acquired lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.859485] env[62383]: DEBUG nova.network.neutron [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Refreshing network info cache for port a6625886-1cc1-4c4d-bd08-6b5221d4a2c2 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.860885] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:e9:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5efce30e-48dd-493a-a354-f562a8adf7af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6625886-1cc1-4c4d-bd08-6b5221d4a2c2', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 778.870953] env[62383]: DEBUG oslo.service.loopingcall [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 778.872445] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 778.872775] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-810acbc6-0ed8-440d-9752-3aeac12a7f33 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.897934] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 778.897934] env[62383]: value = "task-2451597" [ 778.897934] env[62383]: _type = "Task" [ 778.897934] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.906850] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451597, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.923736] env[62383]: DEBUG nova.scheduler.client.report [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 778.968190] env[62383]: DEBUG oslo_vmware.api [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451595, 'name': PowerOnVM_Task, 'duration_secs': 0.64199} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.968478] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 778.968738] env[62383]: INFO nova.compute.manager [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Took 8.89 seconds to spawn the instance on the hypervisor. 
[ 778.968952] env[62383]: DEBUG nova.compute.manager [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 778.969871] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c325b72d-04fd-49e4-88e3-367f7ace2640 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.107772] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.168311] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451596, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459372} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.168596] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] bc1e1f0c-a86d-4d31-a8c4-45d362e9b807/bc1e1f0c-a86d-4d31-a8c4-45d362e9b807.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 779.168931] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.169219] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-348da0c6-ef35-4a23-8648-affb5fc28588 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.175780] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 779.175780] env[62383]: value = "task-2451598" [ 779.175780] env[62383]: _type = "Task" [ 779.175780] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.183480] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451598, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.413383] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451597, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.429462] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.055s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.432743] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.405s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.432988] env[62383]: DEBUG nova.objects.instance [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lazy-loading 'resources' on Instance uuid 583138d1-f928-4e33-a443-11c627203c44 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 779.451642] env[62383]: INFO nova.scheduler.client.report [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Deleted allocations for instance f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf [ 779.486140] env[62383]: INFO nova.compute.manager [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Took 46.68 seconds to build instance. [ 779.584174] env[62383]: DEBUG nova.network.neutron [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Updated VIF entry in instance network info cache for port a6625886-1cc1-4c4d-bd08-6b5221d4a2c2. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 779.584594] env[62383]: DEBUG nova.network.neutron [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Updating instance_info_cache with network_info: [{"id": "a6625886-1cc1-4c4d-bd08-6b5221d4a2c2", "address": "fa:16:3e:2b:e9:9a", "network": {"id": "c1ceac9a-f4dd-41e3-9156-9fca4c3727b3", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1636786674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a657c912fef04c5ca8c0b5d96a8a3064", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6625886-1c", "ovs_interfaceid": "a6625886-1cc1-4c4d-bd08-6b5221d4a2c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.685718] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451598, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066698} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.686198] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 779.686990] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90548adf-ca68-4b20-97c9-bf5e0522fdf3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.708965] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] bc1e1f0c-a86d-4d31-a8c4-45d362e9b807/bc1e1f0c-a86d-4d31-a8c4-45d362e9b807.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.709265] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4f110f8-0c29-4043-ac22-f549690215ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.729200] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 779.729200] env[62383]: value = "task-2451599" [ 779.729200] env[62383]: _type = "Task" [ 779.729200] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.736778] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451599, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.911201] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451597, 'name': CreateVM_Task, 'duration_secs': 0.609334} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.911382] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 779.912064] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.912239] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.912576] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 779.912840] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d8b838f-2b29-4613-ac70-ecafc16e3d86 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.917937] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 779.917937] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52003569-7a97-d8e7-fcc6-63dd7a6c1a22" [ 779.917937] env[62383]: _type = "Task" [ 779.917937] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.925817] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52003569-7a97-d8e7-fcc6-63dd7a6c1a22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.961248] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8ab77d26-c7c8-4963-89b2-c869f8fd60c2 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.599s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.989771] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d7c3b38-aa91-4429-b365-5aab70350687 tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.658s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.087190] env[62383]: DEBUG oslo_concurrency.lockutils [req-3153a86e-01cb-4066-85f9-2daade7229dc req-a5fe14d7-850b-4fb2-94f9-c98f7fbad3c7 service nova] Releasing lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.242795] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451599, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.307849] env[62383]: DEBUG nova.compute.manager [req-689e8f12-6099-4b56-89df-6af4d00459af req-df92c0f5-6515-46ec-805e-c168ce2be0e3 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Received event network-changed-d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 780.307849] env[62383]: DEBUG nova.compute.manager [req-689e8f12-6099-4b56-89df-6af4d00459af req-df92c0f5-6515-46ec-805e-c168ce2be0e3 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Refreshing instance network info cache due to event network-changed-d2a0fa64-9d35-4dfd-8124-3099b780d7b7. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 780.307849] env[62383]: DEBUG oslo_concurrency.lockutils [req-689e8f12-6099-4b56-89df-6af4d00459af req-df92c0f5-6515-46ec-805e-c168ce2be0e3 service nova] Acquiring lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.307849] env[62383]: DEBUG oslo_concurrency.lockutils [req-689e8f12-6099-4b56-89df-6af4d00459af req-df92c0f5-6515-46ec-805e-c168ce2be0e3 service nova] Acquired lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.307849] env[62383]: DEBUG nova.network.neutron [req-689e8f12-6099-4b56-89df-6af4d00459af req-df92c0f5-6515-46ec-805e-c168ce2be0e3 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Refreshing network info cache for port d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 780.431801] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52003569-7a97-d8e7-fcc6-63dd7a6c1a22, 'name': SearchDatastore_Task, 'duration_secs': 0.014561} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.432252] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.432509] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 780.432892] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.432892] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.433093] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 780.433239] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3377f41-99b5-4f0a-b6bb-182953c3db81 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.446305] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 780.446557] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 780.447391] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afb1f80d-9a9c-4b07-9e1c-65e54dc5da7b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.453539] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 780.453539] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525028c4-bdac-77f5-65eb-2d64ade307cc" [ 780.453539] env[62383]: _type = "Task" [ 780.453539] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.465597] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525028c4-bdac-77f5-65eb-2d64ade307cc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.473442] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c36ae0-937f-47e7-b330-75ced1650fc2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.481254] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18dd5a7b-375b-4c6a-b9ec-e03e782dc890 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.513453] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdf4a3b-b5db-4f85-a486-4bc757bee7b0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.521333] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25aaf547-f1f3-4333-8cca-b8e626d302b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.537165] env[62383]: DEBUG nova.compute.provider_tree [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 780.625748] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61083004-c999-421e-89fa-d1cdbab9fa18 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.647460] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance '9604eadf-a027-46dd-989b-0d4b752f883a' progress to 0 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 780.746517] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451599, 'name': ReconfigVM_Task, 'duration_secs': 0.789246} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.747351] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Reconfigured VM instance instance-00000039 to attach disk [datastore1] bc1e1f0c-a86d-4d31-a8c4-45d362e9b807/bc1e1f0c-a86d-4d31-a8c4-45d362e9b807.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 780.747487] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2020aad-c2a7-4900-baa3-aedef5ac5869 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.756023] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 780.756023] env[62383]: value = "task-2451600" [ 780.756023] env[62383]: _type = "Task" [ 780.756023] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.762339] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451600, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.964329] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525028c4-bdac-77f5-65eb-2d64ade307cc, 'name': SearchDatastore_Task, 'duration_secs': 0.014033} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.967981] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ede82685-ef15-4db3-9156-dee10d6a9fe1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.977140] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 780.977140] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a43063-bd8d-c3ad-218a-db72fbadb02d" [ 780.977140] env[62383]: _type = "Task" [ 780.977140] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.984540] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a43063-bd8d-c3ad-218a-db72fbadb02d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.047712] env[62383]: DEBUG nova.scheduler.client.report [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 781.048985] env[62383]: DEBUG nova.network.neutron [req-689e8f12-6099-4b56-89df-6af4d00459af req-df92c0f5-6515-46ec-805e-c168ce2be0e3 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Updated VIF entry in instance network info cache for port d2a0fa64-9d35-4dfd-8124-3099b780d7b7. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 781.049334] env[62383]: DEBUG nova.network.neutron [req-689e8f12-6099-4b56-89df-6af4d00459af req-df92c0f5-6515-46ec-805e-c168ce2be0e3 service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Updating instance_info_cache with network_info: [{"id": "d2a0fa64-9d35-4dfd-8124-3099b780d7b7", "address": "fa:16:3e:51:f7:25", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a0fa64-9d", "ovs_interfaceid": "d2a0fa64-9d35-4dfd-8124-3099b780d7b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.152670] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 781.152989] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26a9fd06-e891-429e-bca6-bf09a5c28f43 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.162370] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 
tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 781.162370] env[62383]: value = "task-2451601" [ 781.162370] env[62383]: _type = "Task" [ 781.162370] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.171282] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451601, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.264639] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451600, 'name': Rename_Task, 'duration_secs': 0.138224} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.264911] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 781.265032] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f711a256-447b-4fa1-a087-aac30ab7b60e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.272937] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 781.272937] env[62383]: value = "task-2451602" [ 781.272937] env[62383]: _type = "Task" [ 781.272937] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.286773] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451602, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.309535] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.309894] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.310148] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.310348] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.310566] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.313023] env[62383]: INFO nova.compute.manager [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Terminating instance [ 781.486582] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a43063-bd8d-c3ad-218a-db72fbadb02d, 'name': SearchDatastore_Task, 'duration_secs': 0.010778} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.487047] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.487155] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] a27fcace-4fb3-48fb-946d-b8057f6ee601/a27fcace-4fb3-48fb-946d-b8057f6ee601.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 781.487456] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5299dc1-bbf4-4f99-b03f-5e8714ecd269 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.494137] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 781.494137] env[62383]: value = "task-2451603" [ 781.494137] env[62383]: _type = "Task" [ 781.494137] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.501954] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451603, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.552606] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.120s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.557682] env[62383]: DEBUG oslo_concurrency.lockutils [req-689e8f12-6099-4b56-89df-6af4d00459af req-df92c0f5-6515-46ec-805e-c168ce2be0e3 service nova] Releasing lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.557682] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.136s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.557682] env[62383]: INFO nova.compute.claims [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.588460] env[62383]: INFO nova.scheduler.client.report [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Deleted allocations for instance 583138d1-f928-4e33-a443-11c627203c44 [ 781.677942] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451601, 'name': PowerOffVM_Task, 'duration_secs': 0.271497} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.678504] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 781.678956] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance '9604eadf-a027-46dd-989b-0d4b752f883a' progress to 17 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 781.787659] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451602, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.817760] env[62383]: DEBUG nova.compute.manager [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 781.818125] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 781.819613] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f486f5f-cb87-4ebf-a15c-4e9cde95fe40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.831854] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 781.832228] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2cd0a0b6-fafd-46de-857f-4f8379212b22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.842584] env[62383]: DEBUG oslo_vmware.api [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 781.842584] env[62383]: value = "task-2451604" [ 781.842584] env[62383]: _type = "Task" [ 781.842584] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.852776] env[62383]: DEBUG oslo_vmware.api [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451604, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.011382] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451603, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.101306] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bc3fbce0-b448-4d38-9498-fd489696432f tempest-InstanceActionsTestJSON-695529899 tempest-InstanceActionsTestJSON-695529899-project-member] Lock "583138d1-f928-4e33-a443-11c627203c44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.075s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.188097] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 782.188097] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.188097] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 782.188097] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.188292] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 782.188292] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 782.188485] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 782.188689] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 
tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 782.188869] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 782.189104] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 782.189251] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 782.195578] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2ee3db7-5e71-46a8-8ee3-24280baad3d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.212396] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 782.212396] env[62383]: value = "task-2451605" [ 782.212396] env[62383]: _type = "Task" [ 782.212396] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.221749] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451605, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.284534] env[62383]: DEBUG oslo_vmware.api [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451602, 'name': PowerOnVM_Task, 'duration_secs': 0.990646} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.285147] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 782.285513] env[62383]: INFO nova.compute.manager [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Took 8.19 seconds to spawn the instance on the hypervisor. 
[ 782.285858] env[62383]: DEBUG nova.compute.manager [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 782.288666] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd9e41d-c5dd-48bc-8ef6-f3ae6ab24556 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.352700] env[62383]: DEBUG oslo_vmware.api [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451604, 'name': PowerOffVM_Task, 'duration_secs': 0.218226} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.353241] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 782.355499] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 782.355499] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59801d21-599d-4e45-b0c5-90d8141704d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.505229] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451603, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598942} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.505539] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] a27fcace-4fb3-48fb-946d-b8057f6ee601/a27fcace-4fb3-48fb-946d-b8057f6ee601.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 782.505766] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 782.506046] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b9661cb-0ad5-4da4-9201-1cddab6610d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.511115] env[62383]: DEBUG nova.compute.manager [req-48eeebff-4918-4112-860b-2ff552567ed2 req-e341a264-bdd5-4396-a6e4-04acd63adc1a service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Received event network-changed-d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 782.511310] env[62383]: DEBUG nova.compute.manager [req-48eeebff-4918-4112-860b-2ff552567ed2 req-e341a264-bdd5-4396-a6e4-04acd63adc1a service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Refreshing instance network info cache due to event network-changed-d2a0fa64-9d35-4dfd-8124-3099b780d7b7. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 782.511472] env[62383]: DEBUG oslo_concurrency.lockutils [req-48eeebff-4918-4112-860b-2ff552567ed2 req-e341a264-bdd5-4396-a6e4-04acd63adc1a service nova] Acquiring lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.511618] env[62383]: DEBUG oslo_concurrency.lockutils [req-48eeebff-4918-4112-860b-2ff552567ed2 req-e341a264-bdd5-4396-a6e4-04acd63adc1a service nova] Acquired lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.511775] env[62383]: DEBUG nova.network.neutron [req-48eeebff-4918-4112-860b-2ff552567ed2 req-e341a264-bdd5-4396-a6e4-04acd63adc1a service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Refreshing network info cache for port d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 782.516557] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 782.516557] env[62383]: value = "task-2451607" [ 782.516557] env[62383]: _type = "Task" [ 782.516557] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.529508] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451607, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.553629] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 782.553747] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 782.553947] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Deleting the datastore file [datastore2] 9c2c55a9-5b24-4d52-8d6b-666609349a3a {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 782.554306] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acab4f9c-b8e8-45bc-a7a7-dcd49158fbcb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.561885] env[62383]: DEBUG oslo_vmware.api [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for the task: (returnval){ [ 782.561885] env[62383]: value = "task-2451608" [ 782.561885] env[62383]: _type = "Task" [ 782.561885] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.573270] env[62383]: DEBUG oslo_vmware.api [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451608, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.722979] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451605, 'name': ReconfigVM_Task, 'duration_secs': 0.315335} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.723323] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance '9604eadf-a027-46dd-989b-0d4b752f883a' progress to 33 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 782.810141] env[62383]: INFO nova.compute.manager [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Took 49.88 seconds to build instance. [ 783.029297] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451607, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071264} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.032016] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 783.033097] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8781d8f4-79dd-453c-8f26-641fda13a7a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.055570] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] a27fcace-4fb3-48fb-946d-b8057f6ee601/a27fcace-4fb3-48fb-946d-b8057f6ee601.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 783.058406] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2ad1f37-0d01-4868-896b-53720c6c1edd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.083984] env[62383]: DEBUG oslo_vmware.api [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451608, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.088112] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 783.088112] env[62383]: value = "task-2451609" [ 783.088112] env[62383]: _type = "Task" [ 783.088112] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.097578] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451609, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.134068] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90494c8b-2749-4246-9119-d39788eb1d32 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.143620] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ffa1b58-241c-40e4-9ce2-a58aab448c5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.174432] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf83c56-6077-470b-ad4c-5f45100e150c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.182023] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286fbf9d-35fb-406e-a3a9-1d93f75dfdd5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.195500] env[62383]: DEBUG nova.compute.provider_tree [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.231966] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:26:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='4fbcd04d-cb08-4e45-b5c9-0176dc87583e',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1902383657',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 783.232247] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.232352] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 783.232538] env[62383]: DEBUG nova.virt.hardware [None 
req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.232688] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 783.233218] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 783.233218] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 783.233218] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 783.233376] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 783.233514] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 783.233723] env[62383]: DEBUG nova.virt.hardware [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 783.239249] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 783.240641] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fb7395c-2c66-46a0-85b4-0d39175c0f48 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.258147] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 783.258147] env[62383]: value = "task-2451610" [ 783.258147] 
env[62383]: _type = "Task" [ 783.258147] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.265905] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451610, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.312086] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fbb3d316-a9dd-4167-bc97-53dff24c4986 tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.226s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.584107] env[62383]: DEBUG oslo_vmware.api [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Task: {'id': task-2451608, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.865795} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.584452] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 783.584547] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 783.584728] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 783.584964] env[62383]: INFO nova.compute.manager [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Took 1.77 seconds to destroy the instance on the hypervisor. [ 783.585164] env[62383]: DEBUG oslo.service.loopingcall [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 783.585367] env[62383]: DEBUG nova.compute.manager [-] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 783.585444] env[62383]: DEBUG nova.network.neutron [-] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 783.595162] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451609, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.616461] env[62383]: DEBUG nova.network.neutron [req-48eeebff-4918-4112-860b-2ff552567ed2 req-e341a264-bdd5-4396-a6e4-04acd63adc1a service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Updated VIF entry in instance network info cache for port d2a0fa64-9d35-4dfd-8124-3099b780d7b7. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 783.616581] env[62383]: DEBUG nova.network.neutron [req-48eeebff-4918-4112-860b-2ff552567ed2 req-e341a264-bdd5-4396-a6e4-04acd63adc1a service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Updating instance_info_cache with network_info: [{"id": "d2a0fa64-9d35-4dfd-8124-3099b780d7b7", "address": "fa:16:3e:51:f7:25", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2a0fa64-9d", "ovs_interfaceid": "d2a0fa64-9d35-4dfd-8124-3099b780d7b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.701214] env[62383]: DEBUG nova.scheduler.client.report [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 783.768516] 
env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451610, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.098038] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451609, 'name': ReconfigVM_Task, 'duration_secs': 1.012296} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.099217] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Reconfigured VM instance instance-0000003a to attach disk [datastore1] a27fcace-4fb3-48fb-946d-b8057f6ee601/a27fcace-4fb3-48fb-946d-b8057f6ee601.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 784.099949] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f345789a-7918-4aae-808c-9217a2a1d492 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.106144] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 784.106144] env[62383]: value = "task-2451611" [ 784.106144] env[62383]: _type = "Task" [ 784.106144] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.114852] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451611, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.123474] env[62383]: DEBUG oslo_concurrency.lockutils [req-48eeebff-4918-4112-860b-2ff552567ed2 req-e341a264-bdd5-4396-a6e4-04acd63adc1a service nova] Releasing lock "refresh_cache-5ef22e87-f73c-47ba-b925-2bd2effe74eb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.204865] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.649s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.205593] env[62383]: DEBUG nova.compute.manager [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 784.208638] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.736s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.212193] env[62383]: DEBUG nova.objects.instance [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lazy-loading 'resources' on Instance uuid 0f48434f-859f-4910-883f-2f81be647bad {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.270364] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451610, 'name': ReconfigVM_Task, 'duration_secs': 0.586209} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.273687] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 784.273687] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef0e176-8e07-41e6-8e89-46890f92f7e3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.301562] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 9604eadf-a027-46dd-989b-0d4b752f883a/9604eadf-a027-46dd-989b-0d4b752f883a.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 784.301937] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9df4f29-fb74-4843-a606-2cb3ae040a41 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.324201] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 784.324201] env[62383]: value = "task-2451612" [ 784.324201] env[62383]: _type = "Task" [ 784.324201] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.335715] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451612, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.467262] env[62383]: DEBUG nova.network.neutron [-] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.473803] env[62383]: DEBUG nova.compute.manager [req-f73283f3-6ec2-4c62-b182-840513b81033 req-9b25ae7d-1b13-439e-be26-d0c7a79b0d27 service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Received event network-vif-deleted-d251f129-de40-462a-86b9-50939d1a57c2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 784.474822] env[62383]: INFO nova.compute.manager [req-f73283f3-6ec2-4c62-b182-840513b81033 req-9b25ae7d-1b13-439e-be26-d0c7a79b0d27 service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Neutron deleted interface d251f129-de40-462a-86b9-50939d1a57c2; detaching it from the instance and deleting it from the info cache [ 784.474822] env[62383]: DEBUG nova.network.neutron [req-f73283f3-6ec2-4c62-b182-840513b81033 req-9b25ae7d-1b13-439e-be26-d0c7a79b0d27 service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.615597] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451611, 'name': Rename_Task, 'duration_secs': 0.149302} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.615876] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 784.616116] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e26b077e-c257-4c3c-ad5e-6195911fcb53 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.624162] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 784.624162] env[62383]: value = "task-2451613" [ 784.624162] env[62383]: _type = "Task" [ 784.624162] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.634787] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451613, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.718410] env[62383]: DEBUG nova.compute.utils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 784.720722] env[62383]: DEBUG nova.compute.manager [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 784.721042] env[62383]: DEBUG nova.network.neutron [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 784.782546] env[62383]: DEBUG nova.policy [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6324b3a4f5a24752b0bef1b5d79ea2ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fabc88f824a44c57b19a07a605fb89fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 784.839721] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451612, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.971217] env[62383]: INFO nova.compute.manager [-] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Took 1.38 seconds to deallocate network for instance. [ 784.980027] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-85cd1f28-33c2-4caa-a2b8-b7f9e7b4d8e7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.993024] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6c2b16-5536-4392-aa17-5601c3e9c78b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.040927] env[62383]: DEBUG nova.compute.manager [req-f73283f3-6ec2-4c62-b182-840513b81033 req-9b25ae7d-1b13-439e-be26-d0c7a79b0d27 service nova] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Detach interface failed, port_id=d251f129-de40-462a-86b9-50939d1a57c2, reason: Instance 9c2c55a9-5b24-4d52-8d6b-666609349a3a could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 785.102093] env[62383]: DEBUG nova.network.neutron [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Successfully created port: 9a3f1093-bdac-4a2c-8938-e0953e605535 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.137942] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451613, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.223941] env[62383]: DEBUG nova.compute.manager [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 785.337524] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451612, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.347741] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff07e14-64e9-415a-8514-29b22354e3fb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.353694] env[62383]: DEBUG nova.compute.manager [req-e30fc1eb-a257-4571-a64e-08eff947b4a1 req-3b5b7c1e-be8f-44ba-ab1b-b32111b601e5 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Received event network-changed-c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 785.353962] env[62383]: DEBUG nova.compute.manager [req-e30fc1eb-a257-4571-a64e-08eff947b4a1 req-3b5b7c1e-be8f-44ba-ab1b-b32111b601e5 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Refreshing instance network info cache due to event network-changed-c23968b2-dbec-433d-8bcc-80644a89ec08. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 785.354273] env[62383]: DEBUG oslo_concurrency.lockutils [req-e30fc1eb-a257-4571-a64e-08eff947b4a1 req-3b5b7c1e-be8f-44ba-ab1b-b32111b601e5 service nova] Acquiring lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.354610] env[62383]: DEBUG oslo_concurrency.lockutils [req-e30fc1eb-a257-4571-a64e-08eff947b4a1 req-3b5b7c1e-be8f-44ba-ab1b-b32111b601e5 service nova] Acquired lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.354775] env[62383]: DEBUG nova.network.neutron [req-e30fc1eb-a257-4571-a64e-08eff947b4a1 req-3b5b7c1e-be8f-44ba-ab1b-b32111b601e5 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Refreshing network info cache for port c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.361805] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8833d7e5-dda5-4192-adf1-b256f705166a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.399070] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0653542c-ae69-4e77-b26e-a2815a69f1b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.407504] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e820a09-cb06-4157-812e-d897aa4f6792 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.423094] env[62383]: DEBUG nova.compute.provider_tree [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 785.478123] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 785.634721] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451613, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.841232] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451612, 'name': ReconfigVM_Task, 'duration_secs': 1.269452} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.841633] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 9604eadf-a027-46dd-989b-0d4b752f883a/9604eadf-a027-46dd-989b-0d4b752f883a.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 785.841987] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance '9604eadf-a027-46dd-989b-0d4b752f883a' progress to 50 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 785.950797] env[62383]: ERROR nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [req-71a779ed-3339-4a2e-b553-87369efe2e0e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-71a779ed-3339-4a2e-b553-87369efe2e0e"}]} [ 785.980344] env[62383]: DEBUG nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 785.996037] env[62383]: DEBUG nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 785.996037] env[62383]: DEBUG nova.compute.provider_tree [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 786.008558] env[62383]: DEBUG nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 786.026266] env[62383]: DEBUG nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 786.141561] env[62383]: DEBUG oslo_vmware.api [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451613, 'name': PowerOnVM_Task, 'duration_secs': 1.156775} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.141865] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 786.142085] env[62383]: INFO nova.compute.manager [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Took 9.39 seconds to spawn the instance on the hypervisor. [ 786.142270] env[62383]: DEBUG nova.compute.manager [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 786.143868] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa08241d-773e-4abe-9525-7878898cfd94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.194108] env[62383]: DEBUG nova.network.neutron [req-e30fc1eb-a257-4571-a64e-08eff947b4a1 req-3b5b7c1e-be8f-44ba-ab1b-b32111b601e5 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updated VIF entry in instance network info cache for port c23968b2-dbec-433d-8bcc-80644a89ec08. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 786.195022] env[62383]: DEBUG nova.network.neutron [req-e30fc1eb-a257-4571-a64e-08eff947b4a1 req-3b5b7c1e-be8f-44ba-ab1b-b32111b601e5 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updating instance_info_cache with network_info: [{"id": "c23968b2-dbec-433d-8bcc-80644a89ec08", "address": "fa:16:3e:3f:6c:37", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23968b2-db", "ovs_interfaceid": "c23968b2-dbec-433d-8bcc-80644a89ec08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.234078] env[62383]: DEBUG nova.compute.manager [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Start spawning the instance on the hypervisor. 
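Entries such as "Task: {'id': task-2451613, 'name': PowerOnVM_Task, ...} completed successfully" are produced by polling the vCenter task object until it leaves the queued/running states. A simplified, generic version of such a poll loop, not the oslo.vmware implementation actually running here; get_task_info is a hypothetical callable returning a dict with 'state' and optional 'error' keys.

    # Generic sketch of waiting on an asynchronous vCenter-style task.
    import time

    def wait_for_task(task_id, get_task_info, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)   # e.g. {'state': 'running', 'progress': 40}
            if info["state"] == "success":
                return info                 # corresponds to "completed successfully"
            if info["state"] == "error":
                raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
            time.sleep(poll_interval)       # queued / running: poll again
        raise TimeoutError(f"task {task_id} did not finish within {timeout}s")

The "progress is 0%." lines further down are the same kind of loop reporting intermediate state between polls.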
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 786.270765] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 786.271245] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.271533] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 786.271826] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.272099] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 786.272382] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 786.272715] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 786.272989] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 786.273280] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Got 1 possible 
topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 786.273570] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 786.273871] env[62383]: DEBUG nova.virt.hardware [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 786.274855] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821da85d-9111-4711-bcf5-f5f678b2a86a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.285590] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f798f5c4-20a4-41f6-8be2-667314b79f3f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.349478] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c753b5d-15bc-488a-9a16-0aeda5546a50 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.375961] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40682e6-2069-4291-b133-f22c7129a6f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.396212] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance '9604eadf-a027-46dd-989b-0d4b752f883a' progress to 67 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 786.527736] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "6fda89ec-aee1-4c1e-b005-51a9742abb19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.527967] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "6fda89ec-aee1-4c1e-b005-51a9742abb19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.633267] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7315fa4b-f460-4e08-bfec-127ca9e53c82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.641874] env[62383]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e53c432-6da8-44cf-a14c-ae71dba4d0fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.685059] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46285202-5268-4485-97c7-eb4b3a9b9132 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.688858] env[62383]: INFO nova.compute.manager [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Took 53.25 seconds to build instance. [ 786.696353] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11831b6f-d4d3-4556-9b33-66bb55ec6a38 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.701483] env[62383]: DEBUG oslo_concurrency.lockutils [req-e30fc1eb-a257-4571-a64e-08eff947b4a1 req-3b5b7c1e-be8f-44ba-ab1b-b32111b601e5 service nova] Releasing lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.712324] env[62383]: DEBUG nova.compute.provider_tree [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 786.836194] env[62383]: DEBUG nova.compute.manager [req-7750f4c9-2893-463c-a19a-9d09b538037f req-f04d9361-e016-4900-9011-ecec5c81b085 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Received event network-vif-plugged-9a3f1093-bdac-4a2c-8938-e0953e605535 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 786.836194] env[62383]: DEBUG oslo_concurrency.lockutils [req-7750f4c9-2893-463c-a19a-9d09b538037f req-f04d9361-e016-4900-9011-ecec5c81b085 service nova] Acquiring lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.836194] env[62383]: DEBUG oslo_concurrency.lockutils [req-7750f4c9-2893-463c-a19a-9d09b538037f req-f04d9361-e016-4900-9011-ecec5c81b085 service nova] Lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.836194] env[62383]: DEBUG oslo_concurrency.lockutils [req-7750f4c9-2893-463c-a19a-9d09b538037f req-f04d9361-e016-4900-9011-ecec5c81b085 service nova] Lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
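The "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" lines above reflect a small enumeration step: for the requested vCPU count, list the (sockets, cores, threads) combinations that multiply out to it and stay within the flavor/image limits. A toy sketch of that enumeration, using the 65536 maxima shown in the log as defaults; this is illustrative and not Nova's actual implementation.

    # Enumerate (sockets, cores, threads) combinations whose product equals the
    # requested vCPU count and which stay within the given maxima. With vcpus=1
    # this yields only (1, 1, 1), matching the single topology logged above.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    # possible_topologies(1) -> [(1, 1, 1)]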
{{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.836194] env[62383]: DEBUG nova.compute.manager [req-7750f4c9-2893-463c-a19a-9d09b538037f req-f04d9361-e016-4900-9011-ecec5c81b085 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] No waiting events found dispatching network-vif-plugged-9a3f1093-bdac-4a2c-8938-e0953e605535 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 786.840053] env[62383]: WARNING nova.compute.manager [req-7750f4c9-2893-463c-a19a-9d09b538037f req-f04d9361-e016-4900-9011-ecec5c81b085 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Received unexpected event network-vif-plugged-9a3f1093-bdac-4a2c-8938-e0953e605535 for instance with vm_state building and task_state spawning. [ 786.840053] env[62383]: DEBUG nova.network.neutron [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Successfully updated port: 9a3f1093-bdac-4a2c-8938-e0953e605535 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 786.963343] env[62383]: DEBUG nova.network.neutron [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Port 796c3e3e-48f2-4d7f-8f7d-974f792c4426 binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 787.033097] env[62383]: DEBUG nova.compute.manager [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 787.191354] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e4c190fd-bca2-4c88-bc57-bc2a2b350b4f tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "a27fcace-4fb3-48fb-946d-b8057f6ee601" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.267s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.242376] env[62383]: ERROR nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [req-5a24e190-97ef-4dc2-8778-92c7d4010f93] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5a24e190-97ef-4dc2-8778-92c7d4010f93"}]} [ 787.263432] env[62383]: DEBUG nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 787.281480] env[62383]: DEBUG nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 787.281723] env[62383]: DEBUG nova.compute.provider_tree [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 787.296423] env[62383]: DEBUG nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 787.320353] env[62383]: DEBUG nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 787.342669] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "refresh_cache-a68610a6-f684-4cc9-8dd4-8b90d2d379da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.342819] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 
tempest-ImagesTestJSON-668991885-project-member] Acquired lock "refresh_cache-a68610a6-f684-4cc9-8dd4-8b90d2d379da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.343106] env[62383]: DEBUG nova.network.neutron [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.491968] env[62383]: DEBUG nova.compute.manager [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Received event network-changed-c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 787.491968] env[62383]: DEBUG nova.compute.manager [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Refreshing instance network info cache due to event network-changed-c23968b2-dbec-433d-8bcc-80644a89ec08. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 787.491968] env[62383]: DEBUG oslo_concurrency.lockutils [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] Acquiring lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.491968] env[62383]: DEBUG oslo_concurrency.lockutils [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] Acquired lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.491968] env[62383]: DEBUG nova.network.neutron [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Refreshing network info cache for port c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.561806] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.884756] env[62383]: DEBUG nova.network.neutron [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Instance cache missing network info. 
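Lock names like "refresh_cache-a68610a6-f684-4cc9-8dd4-8b90d2d379da" and "compute_resources" above show the per-resource locking convention: one named lock per instance (or per shared structure), held around the cache refresh so concurrent refreshes of the same instance serialize while different instances proceed in parallel. A minimal sketch of that pattern with oslo.concurrency, assuming it is installed; fetch_nw_info and the cache dict are placeholders.

    # Sketch of the per-instance lock pattern visible in the "refresh_cache-..."
    # entries above.
    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, fetch_nw_info, cache):
        # lockutils.lock() is a context manager keyed by name; callers using the
        # same name (same instance) wait, different instances do not contend.
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            cache[instance_uuid] = fetch_nw_info(instance_uuid)
            return cache[instance_uuid]

The "waited 0.000s" / "held 0.000s" figures in the log are the wait and hold times oslo.concurrency reports for exactly these acquisitions.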
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.894328] env[62383]: INFO nova.compute.manager [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Rescuing [ 787.894328] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.894328] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.894328] env[62383]: DEBUG nova.network.neutron [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.950490] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa52b3c-c0a0-40c1-a7de-34581bba731a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.960391] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a320f051-f946-43bc-a402-0e5696232207 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.004683] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cff6a3a-a596-489e-b401-5ef21655ded9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.016200] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "9604eadf-a027-46dd-989b-0d4b752f883a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.016200] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "9604eadf-a027-46dd-989b-0d4b752f883a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.016200] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "9604eadf-a027-46dd-989b-0d4b752f883a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.027023] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdfeb1b-55ba-42c1-9179-4bb4fb3c2df7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.039895] env[62383]: DEBUG nova.compute.provider_tree [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 788.189225] env[62383]: DEBUG nova.network.neutron [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Updating instance_info_cache with network_info: [{"id": "9a3f1093-bdac-4a2c-8938-e0953e605535", "address": "fa:16:3e:76:cf:94", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a3f1093-bd", "ovs_interfaceid": "9a3f1093-bdac-4a2c-8938-e0953e605535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.338170] env[62383]: DEBUG nova.network.neutron [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updated VIF entry in instance network info cache for port c23968b2-dbec-433d-8bcc-80644a89ec08. 
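The network_info blobs dumped above are plain lists of VIF dicts, each carrying its network, subnets and fixed IPs. A small helper showing how such a structure can be walked to pull out the fixed addresses; this operates on the raw list/dict form printed in the log, whereas Nova itself wraps it in model classes.

    # Collect (port id, devname, fixed IP) tuples from a network_info list like
    # the ones logged above.
    def fixed_ips(network_info):
        result = []
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    if ip["type"] == "fixed":
                        result.append((vif["id"], vif["devname"], ip["address"]))
        return result

    # Against the cache entry for port c23968b2-... above this would yield
    # [('c23968b2-dbec-433d-8bcc-80644a89ec08', 'tapc23968b2-db', '192.168.128.7')].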
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 788.338698] env[62383]: DEBUG nova.network.neutron [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updating instance_info_cache with network_info: [{"id": "c23968b2-dbec-433d-8bcc-80644a89ec08", "address": "fa:16:3e:3f:6c:37", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23968b2-db", "ovs_interfaceid": "c23968b2-dbec-433d-8bcc-80644a89ec08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.578433] env[62383]: DEBUG nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 86 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 788.579502] env[62383]: DEBUG nova.compute.provider_tree [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 86 to 87 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 788.579502] env[62383]: DEBUG nova.compute.provider_tree [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 788.680390] env[62383]: DEBUG 
nova.network.neutron [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Updating instance_info_cache with network_info: [{"id": "a6625886-1cc1-4c4d-bd08-6b5221d4a2c2", "address": "fa:16:3e:2b:e9:9a", "network": {"id": "c1ceac9a-f4dd-41e3-9156-9fca4c3727b3", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1636786674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a657c912fef04c5ca8c0b5d96a8a3064", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6625886-1c", "ovs_interfaceid": "a6625886-1cc1-4c4d-bd08-6b5221d4a2c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.692924] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "refresh_cache-a68610a6-f684-4cc9-8dd4-8b90d2d379da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.693247] env[62383]: DEBUG nova.compute.manager [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Instance network_info: |[{"id": "9a3f1093-bdac-4a2c-8938-e0953e605535", "address": "fa:16:3e:76:cf:94", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a3f1093-bd", "ovs_interfaceid": "9a3f1093-bdac-4a2c-8938-e0953e605535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 788.693645] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 
tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:cf:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5f60c972-a72d-4c5f-a250-faadfd6eafbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a3f1093-bdac-4a2c-8938-e0953e605535', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 788.701471] env[62383]: DEBUG oslo.service.loopingcall [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.702072] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 788.702335] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-689d032f-86c5-4396-9b6f-82e7a21821cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.725933] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 788.725933] env[62383]: value = "task-2451614" [ 788.725933] env[62383]: _type = "Task" [ 788.725933] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.734222] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451614, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.842548] env[62383]: DEBUG oslo_concurrency.lockutils [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] Releasing lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.842867] env[62383]: DEBUG nova.compute.manager [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Received event network-changed-cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 788.843064] env[62383]: DEBUG nova.compute.manager [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Refreshing instance network info cache due to event network-changed-cc7bb81b-ee7e-4bd2-8c93-c133276ee413. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 788.843296] env[62383]: DEBUG oslo_concurrency.lockutils [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] Acquiring lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.843445] env[62383]: DEBUG oslo_concurrency.lockutils [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] Acquired lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.843608] env[62383]: DEBUG nova.network.neutron [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Refreshing network info cache for port cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 788.860904] env[62383]: DEBUG nova.compute.manager [req-72e05e29-8126-467e-b7f0-dbccba3ba90d req-6107f419-e8f1-4341-9c27-1a1ee06f7900 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Received event network-changed-9a3f1093-bdac-4a2c-8938-e0953e605535 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 788.861198] env[62383]: DEBUG nova.compute.manager [req-72e05e29-8126-467e-b7f0-dbccba3ba90d req-6107f419-e8f1-4341-9c27-1a1ee06f7900 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Refreshing instance network info cache due to event network-changed-9a3f1093-bdac-4a2c-8938-e0953e605535. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 788.861354] env[62383]: DEBUG oslo_concurrency.lockutils [req-72e05e29-8126-467e-b7f0-dbccba3ba90d req-6107f419-e8f1-4341-9c27-1a1ee06f7900 service nova] Acquiring lock "refresh_cache-a68610a6-f684-4cc9-8dd4-8b90d2d379da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.861496] env[62383]: DEBUG oslo_concurrency.lockutils [req-72e05e29-8126-467e-b7f0-dbccba3ba90d req-6107f419-e8f1-4341-9c27-1a1ee06f7900 service nova] Acquired lock "refresh_cache-a68610a6-f684-4cc9-8dd4-8b90d2d379da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.861682] env[62383]: DEBUG nova.network.neutron [req-72e05e29-8126-467e-b7f0-dbccba3ba90d req-6107f419-e8f1-4341-9c27-1a1ee06f7900 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Refreshing network info cache for port 9a3f1093-bdac-4a2c-8938-e0953e605535 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 788.907303] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.907611] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock 
"bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.907822] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.908035] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.908239] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.911144] env[62383]: INFO nova.compute.manager [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Terminating instance [ 789.050631] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.050872] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.051038] env[62383]: DEBUG nova.network.neutron [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.084553] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.876s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.087899] env[62383]: DEBUG oslo_concurrency.lockutils 
[None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.371s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.090470] env[62383]: INFO nova.compute.claims [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 789.118621] env[62383]: INFO nova.scheduler.client.report [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted allocations for instance 0f48434f-859f-4910-883f-2f81be647bad [ 789.183153] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.238331] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451614, 'name': CreateVM_Task, 'duration_secs': 0.385349} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.238460] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 789.239435] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.239435] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.239583] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 789.240751] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc543722-7a0b-43ff-920d-8962ef970fb1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.244765] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 789.244765] env[62383]: value = 
"session[526c6062-9206-ac03-b2da-fd469a7c1551]52407af8-dc9d-12a8-3395-aa595aeb480a" [ 789.244765] env[62383]: _type = "Task" [ 789.244765] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.252680] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52407af8-dc9d-12a8-3395-aa595aeb480a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.416771] env[62383]: DEBUG nova.compute.manager [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 789.417025] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 789.418240] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac22aa1-0371-49fb-8b6a-9cc31c650da2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.431021] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 789.431021] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-420d6d7f-0095-4ac8-a527-68d8f052ea44 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.436350] env[62383]: DEBUG oslo_vmware.api [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 789.436350] env[62383]: value = "task-2451615" [ 789.436350] env[62383]: _type = "Task" [ 789.436350] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.448078] env[62383]: DEBUG oslo_vmware.api [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451615, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.631410] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8b0ca1a9-8d4d-4a47-b19f-16f8d676e647 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "0f48434f-859f-4910-883f-2f81be647bad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.229s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.684155] env[62383]: DEBUG nova.network.neutron [req-72e05e29-8126-467e-b7f0-dbccba3ba90d req-6107f419-e8f1-4341-9c27-1a1ee06f7900 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Updated VIF entry in instance network info cache for port 9a3f1093-bdac-4a2c-8938-e0953e605535. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 789.684790] env[62383]: DEBUG nova.network.neutron [req-72e05e29-8126-467e-b7f0-dbccba3ba90d req-6107f419-e8f1-4341-9c27-1a1ee06f7900 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Updating instance_info_cache with network_info: [{"id": "9a3f1093-bdac-4a2c-8938-e0953e605535", "address": "fa:16:3e:76:cf:94", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a3f1093-bd", "ovs_interfaceid": "9a3f1093-bdac-4a2c-8938-e0953e605535", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.702118] env[62383]: DEBUG nova.network.neutron [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Updated VIF entry in instance network info cache for port cc7bb81b-ee7e-4bd2-8c93-c133276ee413. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 789.702466] env[62383]: DEBUG nova.network.neutron [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Updating instance_info_cache with network_info: [{"id": "cc7bb81b-ee7e-4bd2-8c93-c133276ee413", "address": "fa:16:3e:8c:8c:21", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc7bb81b-ee", "ovs_interfaceid": "cc7bb81b-ee7e-4bd2-8c93-c133276ee413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.756968] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52407af8-dc9d-12a8-3395-aa595aeb480a, 'name': SearchDatastore_Task, 'duration_secs': 0.039439} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.759064] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.759317] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 789.759574] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.759687] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.759861] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 789.760153] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43f22c03-602c-4f2d-a8df-a60accc36b58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.768077] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 789.768294] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 789.772018] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b9da63c-5e0c-4a88-a1b3-a07ab7ca8b47 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.774572] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 789.774572] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52bf7495-7f50-4bcc-3354-b1f8f2a5d75a" [ 789.774572] env[62383]: _type = "Task" [ 789.774572] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.782957] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bf7495-7f50-4bcc-3354-b1f8f2a5d75a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.877742] env[62383]: DEBUG nova.network.neutron [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance_info_cache with network_info: [{"id": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "address": "fa:16:3e:9b:eb:a7", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap796c3e3e-48", "ovs_interfaceid": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.950857] env[62383]: DEBUG oslo_vmware.api [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451615, 'name': PowerOffVM_Task, 'duration_secs': 0.419242} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.950857] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 789.950857] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 789.950857] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69833182-9f67-48a6-a4e8-33bc06fbd0d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.007987] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 790.007987] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 790.008196] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Deleting the datastore file [datastore1] bc1e1f0c-a86d-4d31-a8c4-45d362e9b807 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 790.008470] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc42ce7d-5a55-49e6-a7d7-be4faafaeb3c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.015151] env[62383]: DEBUG oslo_vmware.api [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 790.015151] env[62383]: value = "task-2451617" [ 790.015151] env[62383]: _type = "Task" [ 790.015151] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.023762] env[62383]: DEBUG oslo_vmware.api [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451617, 'name': DeleteDatastoreFile_Task} progress is 0%. 
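Note: the "Waiting for the task ... to complete", "progress is 0%" and "completed successfully" lines above are oslo.vmware's task polling around vCenter *_Task calls (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task). A minimal sketch of that pattern using the public oslo.vmware API (invoke_api plus wait_for_task), assuming a reachable vCenter; the host, credentials and vm_ref below are placeholders, not values from this deployment.

    from oslo_vmware import api

    def power_off_and_unregister(host, user, password, vm_ref):
        # Placeholder connection details -- not the vCenter used in this log.
        session = api.VMwareAPISession(host, user, password,
                                       api_retry_count=10,
                                       task_poll_interval=0.5)
        # *_Task methods return a task reference; wait_for_task() polls it
        # (the "_poll_task ... progress is N%" DEBUG lines above) until the
        # task succeeds, raising if it ends in error.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # UnregisterVM is synchronous, so there is no task to poll.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)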
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.138202] env[62383]: DEBUG nova.compute.manager [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Received event network-changed-cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 790.138402] env[62383]: DEBUG nova.compute.manager [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Refreshing instance network info cache due to event network-changed-cc7bb81b-ee7e-4bd2-8c93-c133276ee413. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 790.138593] env[62383]: DEBUG oslo_concurrency.lockutils [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] Acquiring lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.188279] env[62383]: DEBUG oslo_concurrency.lockutils [req-72e05e29-8126-467e-b7f0-dbccba3ba90d req-6107f419-e8f1-4341-9c27-1a1ee06f7900 service nova] Releasing lock "refresh_cache-a68610a6-f684-4cc9-8dd4-8b90d2d379da" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.208806] env[62383]: DEBUG oslo_concurrency.lockutils [req-f2625995-4896-40bb-9e1c-7de24aa9a6ae req-5d5b12ae-3d65-4385-b7df-cd9b0664a2bb service nova] Releasing lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.209676] env[62383]: DEBUG oslo_concurrency.lockutils [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] Acquired lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.209882] env[62383]: DEBUG nova.network.neutron [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Refreshing network info cache for port cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.288445] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bf7495-7f50-4bcc-3354-b1f8f2a5d75a, 'name': SearchDatastore_Task, 'duration_secs': 0.009305} completed successfully. 
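Note: the "Acquiring lock", "Acquired lock" / "acquired by", and "Releasing lock" / "released by" lines above come from oslo.concurrency's lockutils (the inner/lock call sites shown in the log). A minimal sketch of the two usual patterns, the synchronized decorator and the lock context manager, using only the public oslo.concurrency API; the lock names are copied from the log and the function bodies are hypothetical.

    from oslo_concurrency import lockutils

    # Decorator form: calls serialize on the named lock and produce the
    # "acquired ... :: waited Ns" / "released ... :: held Ns" DEBUG lines.
    @lockutils.synchronized('refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807')
    def refresh_instance_cache():
        pass  # hypothetical: refresh the instance network info cache

    # Context-manager form, as used around the image-cache vmdk above.
    def process_cached_image(ds_path):
        with lockutils.lock(ds_path):
            pass  # hypothetical: fetch or reuse the cached image under the lock

    process_cached_image('[datastore2] devstack-image-cache_base/'
                         'cac3b430-a1d5-4ad1-92ec-34c2261779a8/'
                         'cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk')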
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.289266] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a4f0b0f-3b86-4ab8-9de6-27e0426352d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.296386] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 790.296386] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527b5b90-832c-6c0e-cd97-040f30bc1b1d" [ 790.296386] env[62383]: _type = "Task" [ 790.296386] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.304439] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527b5b90-832c-6c0e-cd97-040f30bc1b1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.386654] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.533702] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "1b025655-acad-4b70-9e1a-489683cafb7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.533925] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "1b025655-acad-4b70-9e1a-489683cafb7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.539513] env[62383]: DEBUG oslo_vmware.api [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451617, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197457} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.539815] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 790.540269] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 790.540618] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 790.540829] env[62383]: INFO nova.compute.manager [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Took 1.12 seconds to destroy the instance on the hypervisor. [ 790.541274] env[62383]: DEBUG oslo.service.loopingcall [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 790.541497] env[62383]: DEBUG nova.compute.manager [-] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 790.541599] env[62383]: DEBUG nova.network.neutron [-] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 790.622277] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485cef7b-8a61-4a33-be43-e9496989359d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.630307] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d4fd11-fc97-4d09-8f0b-0cc3f09efe66 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.667921] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68193d1-89ae-4c98-8907-ff831b648ae6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.678126] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a637594-dc1c-4667-820d-c567a559c808 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.693338] env[62383]: DEBUG nova.compute.provider_tree [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.722979] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 790.723296] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72f7ead9-642f-4d65-8e43-7d6a0e49fd67 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.730148] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 790.730148] env[62383]: value = "task-2451618" [ 790.730148] env[62383]: _type = "Task" [ 790.730148] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.738591] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451618, 'name': PowerOffVM_Task} progress is 0%. 
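Note: the "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" line above is oslo.service's looping-call machinery retrying the network teardown. A minimal sketch of that retry pattern with FixedIntervalLoopingCall from the public oslo.service API; the retried function below is a stand-in, not Nova's actual helper.

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _deallocate_with_retries():
        # Stand-in for the real teardown; pretend it succeeds on the 3rd try.
        attempts['count'] += 1
        if attempts['count'] < 3:
            return                            # not done yet, run again later
        raise loopingcall.LoopingCallDone()   # success: stop the loop

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=2).wait()            # blocks until LoopingCallDone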
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.806722] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527b5b90-832c-6c0e-cd97-040f30bc1b1d, 'name': SearchDatastore_Task, 'duration_secs': 0.010764} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.809137] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.809402] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a68610a6-f684-4cc9-8dd4-8b90d2d379da/a68610a6-f684-4cc9-8dd4-8b90d2d379da.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 790.809830] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b6399d12-641b-46ab-acb4-f89f4ee23ae9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.816215] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 790.816215] env[62383]: value = "task-2451619" [ 790.816215] env[62383]: _type = "Task" [ 790.816215] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.823772] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451619, 'name': CopyVirtualDisk_Task} progress is 0%. 
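Note: the "[datastore2] devstack-image-cache_base/<image>.vmdk" strings in the copy above are vCenter datastore paths, "[<datastore name>] <path relative to the datastore root>". A tiny sketch splitting that notation with plain string handling (oslo.vmware also ships a DatastorePath helper for this; it is skipped here only to keep the sketch dependency-free).

    def split_datastore_path(ds_path):
        # "[datastore2] devstack-image-cache_base/img.vmdk"
        #   -> ("datastore2", "devstack-image-cache_base/img.vmdk")
        ds_name, _, rel_path = ds_path.partition('] ')
        return ds_name.lstrip('['), rel_path

    src = ('[datastore2] devstack-image-cache_base/'
           'cac3b430-a1d5-4ad1-92ec-34c2261779a8/'
           'cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk')
    print(split_datastore_path(src))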
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.910142] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df64c9e7-067f-4e1a-89e6-35213eb91be6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.934672] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d0dd73-0bd0-4a38-95a8-ef2c6785b475 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.942986] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance '9604eadf-a027-46dd-989b-0d4b752f883a' progress to 83 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 790.947425] env[62383]: DEBUG nova.network.neutron [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Updated VIF entry in instance network info cache for port cc7bb81b-ee7e-4bd2-8c93-c133276ee413. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 790.947756] env[62383]: DEBUG nova.network.neutron [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Updating instance_info_cache with network_info: [{"id": "cc7bb81b-ee7e-4bd2-8c93-c133276ee413", "address": "fa:16:3e:8c:8c:21", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc7bb81b-ee", "ovs_interfaceid": "cc7bb81b-ee7e-4bd2-8c93-c133276ee413", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.039909] env[62383]: DEBUG nova.compute.manager [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 791.201619] env[62383]: DEBUG nova.scheduler.client.report [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 791.241554] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451618, 'name': PowerOffVM_Task, 'duration_secs': 0.195323} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.241883] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 791.242777] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e772b4-0140-4a4b-a5b6-cb08c9349308 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.265666] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa248d2-3b62-4457-9fec-aa60c5eaac5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.302277] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 791.302618] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2953942c-c463-47fc-b458-0089b93104bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.310009] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 791.310009] env[62383]: value = "task-2451620" [ 791.310009] env[62383]: _type = "Task" [ 791.310009] env[62383]: } to complete. 
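Note: the inventory reported above for provider 60615f54-0557-436e-a486-87505bffb4c7 uses the standard Placement fields, and the usable capacity per resource class is (total - reserved) * allocation_ratio. A small worked sketch of that arithmetic on the logged values; this is illustration only, not output from the run.

    # capacity = (total - reserved) * allocation_ratio, per resource class
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0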
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.326428] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 791.326428] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.326428] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.326428] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.326428] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.326989] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3ff0e060-eff6-4e1c-a3a8-fd0dd092b8b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.333764] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451619, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488752} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.335044] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a68610a6-f684-4cc9-8dd4-8b90d2d379da/a68610a6-f684-4cc9-8dd4-8b90d2d379da.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 791.335728] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.335728] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.335924] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.336568] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c10a7c6b-aa3d-4253-bb36-974b8df1a7b3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.338557] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c43ff16-04d1-4203-9455-cb6894f15648 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.344069] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 791.344069] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5292d9ec-b296-1416-a163-4128ffe2ed4d" [ 791.344069] env[62383]: _type = "Task" [ 791.344069] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.345378] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 791.345378] env[62383]: value = "task-2451621" [ 791.345378] env[62383]: _type = "Task" [ 791.345378] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.358757] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5292d9ec-b296-1416-a163-4128ffe2ed4d, 'name': SearchDatastore_Task, 'duration_secs': 0.010328} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.363662] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451621, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.364372] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bc571fe-6671-4fb5-b843-865a698b6199 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.369363] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 791.369363] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e9f2e0-b11e-b111-c5eb-1857841272fd" [ 791.369363] env[62383]: _type = "Task" [ 791.369363] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.377553] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e9f2e0-b11e-b111-c5eb-1857841272fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.451480] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 791.451957] env[62383]: DEBUG oslo_concurrency.lockutils [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] Releasing lock "refresh_cache-bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.452200] env[62383]: DEBUG nova.compute.manager [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Received event network-changed-c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 791.452367] env[62383]: DEBUG nova.compute.manager [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Refreshing instance network info cache due to event network-changed-c23968b2-dbec-433d-8bcc-80644a89ec08. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 791.452579] env[62383]: DEBUG oslo_concurrency.lockutils [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] Acquiring lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.452723] env[62383]: DEBUG oslo_concurrency.lockutils [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] Acquired lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.452884] env[62383]: DEBUG nova.network.neutron [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Refreshing network info cache for port c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 791.454097] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b120cc3-7edc-415c-bc94-9ef8b486543f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.461090] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 791.461090] env[62383]: value = "task-2451622" [ 791.461090] env[62383]: _type = "Task" [ 791.461090] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.469855] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451622, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.578182] env[62383]: DEBUG nova.network.neutron [-] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.640532] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.706879] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.707497] env[62383]: DEBUG nova.compute.manager [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 791.711906] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 44.535s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.712142] env[62383]: DEBUG nova.objects.instance [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 791.717672] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "bc37e114-cf55-408b-9841-05eaf411b4f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.717890] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "bc37e114-cf55-408b-9841-05eaf411b4f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.861033] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451621, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.092715} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.863052] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 791.863160] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f1b071-b017-4368-af24-0174ae1cddf9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.886947] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] a68610a6-f684-4cc9-8dd4-8b90d2d379da/a68610a6-f684-4cc9-8dd4-8b90d2d379da.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 791.889915] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e27161e-9e69-4e61-8ebf-3f0ad4277f20 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.912173] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e9f2e0-b11e-b111-c5eb-1857841272fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009508} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.913497] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.913757] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] a27fcace-4fb3-48fb-946d-b8057f6ee601/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. {{(pid=62383) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 791.914088] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 791.914088] env[62383]: value = "task-2451623" [ 791.914088] env[62383]: _type = "Task" [ 791.914088] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.914279] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9090d05-2fdd-43fa-bfed-93693772c2b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.925799] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451623, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.926537] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 791.926537] env[62383]: value = "task-2451624" [ 791.926537] env[62383]: _type = "Task" [ 791.926537] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.934410] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451624, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.970957] env[62383]: DEBUG oslo_vmware.api [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451622, 'name': PowerOnVM_Task, 'duration_secs': 0.390896} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.971304] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.971527] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f4c7d38-c6ee-4dc9-beb4-3c5b963b9d08 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance '9604eadf-a027-46dd-989b-0d4b752f883a' progress to 100 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 792.080857] env[62383]: INFO nova.compute.manager [-] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Took 1.54 seconds to deallocate network for instance. [ 792.158164] env[62383]: DEBUG nova.network.neutron [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updated VIF entry in instance network info cache for port c23968b2-dbec-433d-8bcc-80644a89ec08. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 792.158539] env[62383]: DEBUG nova.network.neutron [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updating instance_info_cache with network_info: [{"id": "c23968b2-dbec-433d-8bcc-80644a89ec08", "address": "fa:16:3e:3f:6c:37", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23968b2-db", "ovs_interfaceid": "c23968b2-dbec-433d-8bcc-80644a89ec08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.168025] env[62383]: DEBUG nova.compute.manager [req-972b2192-f007-4821-8a63-7023adb184ff req-9367deb6-069f-4ab5-a3e1-e183b10c444a service nova] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Received event network-vif-deleted-cc7bb81b-ee7e-4bd2-8c93-c133276ee413 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 792.168254] env[62383]: DEBUG nova.compute.manager [req-972b2192-f007-4821-8a63-7023adb184ff req-9367deb6-069f-4ab5-a3e1-e183b10c444a service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Received event network-changed-c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 792.168372] env[62383]: DEBUG nova.compute.manager [req-972b2192-f007-4821-8a63-7023adb184ff req-9367deb6-069f-4ab5-a3e1-e183b10c444a service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Refreshing instance network info cache due to event network-changed-c23968b2-dbec-433d-8bcc-80644a89ec08. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 792.168544] env[62383]: DEBUG oslo_concurrency.lockutils [req-972b2192-f007-4821-8a63-7023adb184ff req-9367deb6-069f-4ab5-a3e1-e183b10c444a service nova] Acquiring lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.218663] env[62383]: DEBUG nova.compute.utils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 792.223894] env[62383]: DEBUG nova.compute.manager [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 792.223894] env[62383]: DEBUG nova.network.neutron [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 792.226190] env[62383]: DEBUG nova.compute.manager [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 792.232557] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba2fc891-8f9b-4b87-a7a2-1ebac8d8f77a tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.521s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.233751] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.966s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.234069] env[62383]: DEBUG nova.objects.instance [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lazy-loading 'resources' on Instance uuid 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 792.273534] env[62383]: DEBUG nova.policy [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f811e2f3423e44d597363b1dc8fa5e2b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '304a62370f8149049a797eb7077e910b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 
'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 792.426192] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451623, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.436336] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451624, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.588064] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.598286] env[62383]: DEBUG nova.network.neutron [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Successfully created port: ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 792.661632] env[62383]: DEBUG oslo_concurrency.lockutils [req-7ed8bb74-15bb-4a50-8c3b-a3af3d50ad16 req-f11b77c9-5b22-4eb7-a784-ae2f8081fc4f service nova] Releasing lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.662177] env[62383]: DEBUG oslo_concurrency.lockutils [req-972b2192-f007-4821-8a63-7023adb184ff req-9367deb6-069f-4ab5-a3e1-e183b10c444a service nova] Acquired lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.662377] env[62383]: DEBUG nova.network.neutron [req-972b2192-f007-4821-8a63-7023adb184ff req-9367deb6-069f-4ab5-a3e1-e183b10c444a service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Refreshing network info cache for port c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 792.724921] env[62383]: DEBUG nova.compute.manager [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Start building block device mappings for instance. 
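Note: the "Policy check for network:attach_external_network failed with credentials {...}" line above is oslo.policy evaluating a rule against the request context's roles. A minimal sketch of such a check with the public oslo.policy API; the rule string used here is a placeholder chosen for illustration (not Nova's actual default), and only a few credential fields from the logged context are reproduced.

    from oslo_config import cfg
    from oslo_policy import policy

    cfg.CONF([], project='example')   # initialize config with defaults only
    enforcer = policy.Enforcer(cfg.CONF)
    # Placeholder rule: require the admin role (not Nova's real default).
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'], 'is_admin': False,
             'project_id': '304a62370f8149049a797eb7077e910b'}
    print(enforcer.enforce('network:attach_external_network', {}, creds))
    # False -- consistent with the failed check logged above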
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 792.765709] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.928955] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451623, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.942839] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451624, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.320579] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d14e47d-4354-4896-baa0-bdf9e097ed85 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.328202] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f404dbb-bac5-44e9-9c6c-10de0f2030a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.360657] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958d5d9f-fbdd-490b-a0ab-a1680d9fcc34 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.368529] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcb9424-df7e-451d-a9d9-84d1f17986f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.381475] env[62383]: DEBUG nova.compute.provider_tree [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 793.391919] env[62383]: DEBUG nova.network.neutron [req-972b2192-f007-4821-8a63-7023adb184ff req-9367deb6-069f-4ab5-a3e1-e183b10c444a service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updated VIF entry in instance network info cache for port c23968b2-dbec-433d-8bcc-80644a89ec08. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 793.392261] env[62383]: DEBUG nova.network.neutron [req-972b2192-f007-4821-8a63-7023adb184ff req-9367deb6-069f-4ab5-a3e1-e183b10c444a service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updating instance_info_cache with network_info: [{"id": "c23968b2-dbec-433d-8bcc-80644a89ec08", "address": "fa:16:3e:3f:6c:37", "network": {"id": "72c0ae8f-f372-4446-8cfe-f03af5f72dfd", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-473869563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c04aced555934225bc58a044bfb4bc35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f41e4aa-0d23-48c4-a359-574abb2e7b9a", "external-id": "nsx-vlan-transportzone-695", "segmentation_id": 695, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23968b2-db", "ovs_interfaceid": "c23968b2-dbec-433d-8bcc-80644a89ec08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.425465] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451623, 'name': ReconfigVM_Task, 'duration_secs': 1.092254} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.425731] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Reconfigured VM instance instance-0000003b to attach disk [datastore2] a68610a6-f684-4cc9-8dd4-8b90d2d379da/a68610a6-f684-4cc9-8dd4-8b90d2d379da.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 793.426334] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6451b90a-2071-4983-bff8-1712242e3f4e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.432761] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 793.432761] env[62383]: value = "task-2451625" [ 793.432761] env[62383]: _type = "Task" [ 793.432761] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.438946] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451624, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.241419} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.439640] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] a27fcace-4fb3-48fb-946d-b8057f6ee601/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. [ 793.440479] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10dfc822-549d-49d7-a18c-ac38603c2b6c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.447746] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451625, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.470404] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] a27fcace-4fb3-48fb-946d-b8057f6ee601/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 793.470727] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae963d81-7dcd-42a0-bfae-a77c0e234036 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.494259] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 793.494259] env[62383]: value = "task-2451626" [ 793.494259] env[62383]: _type = "Task" [ 793.494259] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.506486] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451626, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.743474] env[62383]: DEBUG nova.compute.manager [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 793.770074] env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 793.770340] env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 793.770502] env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 793.770687] env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 793.770836] env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 793.770981] env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 793.771222] env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 793.771383] env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 793.771548] 
env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 793.771711] env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 793.771883] env[62383]: DEBUG nova.virt.hardware [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 793.772900] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed97166-4c91-411a-9c06-c2beebbd9710 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.780935] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e8719b-4509-4761-b2fa-2a1a998db948 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.894860] env[62383]: DEBUG oslo_concurrency.lockutils [req-972b2192-f007-4821-8a63-7023adb184ff req-9367deb6-069f-4ab5-a3e1-e183b10c444a service nova] Releasing lock "refresh_cache-3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.907581] env[62383]: ERROR nova.scheduler.client.report [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] [req-a152a53d-e34f-4c3c-899a-700320978a6d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a152a53d-e34f-4c3c-899a-700320978a6d"}]} [ 793.923570] env[62383]: DEBUG nova.scheduler.client.report [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 793.938574] env[62383]: DEBUG nova.scheduler.client.report [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 793.938888] env[62383]: DEBUG nova.compute.provider_tree [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 793.944561] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451625, 'name': Rename_Task, 'duration_secs': 0.393276} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.944849] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 793.945132] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8c1190e-3391-4eb6-8821-b62adb66d54c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.954269] env[62383]: DEBUG nova.scheduler.client.report [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 793.958545] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 793.958545] env[62383]: value = "task-2451627" [ 793.958545] env[62383]: _type = "Task" [ 793.958545] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.967842] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451627, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.974686] env[62383]: DEBUG nova.scheduler.client.report [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 794.007101] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451626, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.470312] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451627, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.507882] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451626, 'name': ReconfigVM_Task, 'duration_secs': 0.581093} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.509360] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Reconfigured VM instance instance-0000003a to attach disk [datastore1] a27fcace-4fb3-48fb-946d-b8057f6ee601/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 794.510604] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fed3a9-c8c0-4c73-a895-d1a123d6a2f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.516084] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5c85b0-f1b7-4303-afa1-61ed51a55330 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.521469] env[62383]: DEBUG nova.compute.manager [req-d36e1db9-08b8-4b0c-9225-821fa35aaa14 req-8e12e4c5-a84e-43f1-b30d-fb4d109f5f18 service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Received event network-vif-plugged-ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 794.521802] env[62383]: DEBUG oslo_concurrency.lockutils [req-d36e1db9-08b8-4b0c-9225-821fa35aaa14 req-8e12e4c5-a84e-43f1-b30d-fb4d109f5f18 service nova] Acquiring lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.522073] env[62383]: DEBUG oslo_concurrency.lockutils [req-d36e1db9-08b8-4b0c-9225-821fa35aaa14 req-8e12e4c5-a84e-43f1-b30d-fb4d109f5f18 service nova] Lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.522350] env[62383]: DEBUG oslo_concurrency.lockutils [req-d36e1db9-08b8-4b0c-9225-821fa35aaa14 req-8e12e4c5-a84e-43f1-b30d-fb4d109f5f18 service nova] Lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.522593] env[62383]: DEBUG nova.compute.manager [req-d36e1db9-08b8-4b0c-9225-821fa35aaa14 req-8e12e4c5-a84e-43f1-b30d-fb4d109f5f18 service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] No waiting events found dispatching network-vif-plugged-ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 794.523023] env[62383]: WARNING nova.compute.manager [req-d36e1db9-08b8-4b0c-9225-821fa35aaa14 req-8e12e4c5-a84e-43f1-b30d-fb4d109f5f18 service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Received unexpected event network-vif-plugged-ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3 for instance with vm_state building and task_state spawning. 
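The "Acquiring lock" / "acquired ... waited" / "released ... held" triplets above are emitted by oslo.concurrency's lockutils machinery (its lock() context manager and synchronized() decorator), which Nova uses here to serialize event dispatch per instance on a "<uuid>-events" lock. A minimal sketch of that named-lock pattern, assuming only the oslo.concurrency library; this is an illustration, not Nova's actual event handler:

    # Illustration of the named-lock pattern seen in the log above; not Nova's code.
    from oslo_concurrency import lockutils

    def handle_instance_event(instance_uuid, event_name):
        # One lock per instance ("<uuid>-events") keeps event dispatch for
        # that instance serialized across service threads, which is what the
        # acquire/release DEBUG lines above record.
        with lockutils.lock('%s-events' % instance_uuid):
            # ... pop a registered waiter for event_name, or treat the event
            # as unexpected, as the WARNING above does ...
            pass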
[ 794.553554] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a430dc03-5a9d-4f1e-bd6b-914e3979a50d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.564740] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3193ed60-6340-4cbf-9167-0d6d97add286 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.605203] env[62383]: DEBUG nova.network.neutron [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Successfully updated port: ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 794.607945] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f51c2c7-2b12-476b-9f9e-c81b7bc90722 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.611194] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 794.611194] env[62383]: value = "task-2451628" [ 794.611194] env[62383]: _type = "Task" [ 794.611194] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.611945] env[62383]: DEBUG nova.network.neutron [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Port 796c3e3e-48f2-4d7f-8f7d-974f792c4426 binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 794.612147] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.612299] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.612459] env[62383]: DEBUG nova.network.neutron [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 794.621451] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74a29ff-d3e4-416d-a3d9-d7e9a749e5d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.630590] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 
tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451628, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.642108] env[62383]: DEBUG nova.compute.provider_tree [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 794.968576] env[62383]: DEBUG oslo_vmware.api [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451627, 'name': PowerOnVM_Task, 'duration_secs': 0.789354} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.968908] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.969138] env[62383]: INFO nova.compute.manager [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Took 8.73 seconds to spawn the instance on the hypervisor. 
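The "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" lines and the final duration_secs above come from oslo.vmware's task poller: the driver invokes a vSphere *_Task method and then blocks until the task completes. A minimal sketch of that invoke-and-wait pattern, assuming an already-created oslo_vmware.api.VMwareAPISession named session and a VirtualMachine moref vm_ref (both placeholders, not values from this log):

    # Sketch of the invoke/poll pattern behind the PowerOnVM_Task entries above.
    # `session` (oslo_vmware.api.VMwareAPISession) and `vm_ref` are assumed to
    # already exist; neither is taken from this log.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task at the session's task_poll_interval,
    # logging "progress is N%" until the task succeeds or raises on error.
    task_info = session.wait_for_task(task)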
[ 794.969343] env[62383]: DEBUG nova.compute.manager [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.970131] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f363e4-baf5-4870-8f35-53fb23117821 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.114778] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "refresh_cache-282f2c94-7a63-4eef-aa80-7d67d0a0972a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.114778] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquired lock "refresh_cache-282f2c94-7a63-4eef-aa80-7d67d0a0972a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.114778] env[62383]: DEBUG nova.network.neutron [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 795.127846] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451628, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.176939] env[62383]: DEBUG nova.scheduler.client.report [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 795.176939] env[62383]: DEBUG nova.compute.provider_tree [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 89 to 90 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 795.177164] env[62383]: DEBUG nova.compute.provider_tree [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 795.486252] env[62383]: INFO nova.compute.manager [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Took 55.09 seconds to build instance. 
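The 409 above ("placement.concurrent_update") is Placement's resource-provider generation check rejecting a write made with a stale generation; the report client then refreshes its inventories, aggregates and traits and retries, which is why the provider moves from generation 89 to 90 here. A minimal sketch of that read-modify-retry loop against the Placement HTTP API, as an illustration of the API contract rather than Nova's report client; `sess` is assumed to be a requests.Session with auth and microversion headers already set, and `placement` the endpoint URL (both placeholders):

    # Sketch of the generation-conflict retry seen above; placeholders only.
    def set_inventory(sess, placement, rp_uuid, inventories, attempts=3):
        for _ in range(attempts):
            # Re-read the provider to pick up its current generation.
            rp = sess.get('%s/resource_providers/%s' % (placement, rp_uuid)).json()
            body = {'resource_provider_generation': rp['generation'],
                    'inventories': inventories}
            resp = sess.put('%s/resource_providers/%s/inventories'
                            % (placement, rp_uuid), json=body)
            if resp.status_code != 409:
                return resp
            # 409 placement.concurrent_update: another writer bumped the
            # generation first, so loop and retry with the fresh value.
        raise RuntimeError('inventory update kept conflicting')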
[ 795.530050] env[62383]: DEBUG nova.network.neutron [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance_info_cache with network_info: [{"id": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "address": "fa:16:3e:9b:eb:a7", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap796c3e3e-48", "ovs_interfaceid": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.626909] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451628, 'name': ReconfigVM_Task, 'duration_secs': 0.734054} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.627699] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 795.627784] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-651f782a-6381-4111-9642-c6537f6407bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.634700] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 795.634700] env[62383]: value = "task-2451629" [ 795.634700] env[62383]: _type = "Task" [ 795.634700] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.643487] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451629, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.656477] env[62383]: DEBUG nova.network.neutron [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.684944] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.451s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.687145] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.496s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.687379] env[62383]: DEBUG nova.objects.instance [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lazy-loading 'resources' on Instance uuid 563840a8-8fa7-4bfa-9912-933c14e7076a {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 795.708535] env[62383]: INFO nova.scheduler.client.report [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Deleted allocations for instance 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9 [ 795.794061] env[62383]: DEBUG nova.network.neutron [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Updating instance_info_cache with network_info: [{"id": "ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3", "address": "fa:16:3e:ab:e2:99", "network": {"id": "86f13b93-2f69-4ec7-b838-5d8cb11a1051", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1138221332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "304a62370f8149049a797eb7077e910b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7eda19-2d", "ovs_interfaceid": "ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 795.878694] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e97ee8-33c1-491a-b244-5630efeb9b2f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.885400] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a6cd1eb6-7a1b-49c5-be65-8ca8364dfaaf tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Suspending the VM {{(pid=62383) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 795.885642] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-833517db-4530-48f9-a6f1-3aafbc312a10 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.891867] env[62383]: DEBUG oslo_vmware.api [None req-a6cd1eb6-7a1b-49c5-be65-8ca8364dfaaf tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 795.891867] env[62383]: value = "task-2451630" [ 795.891867] env[62383]: _type = "Task" [ 795.891867] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.898794] env[62383]: DEBUG oslo_vmware.api [None req-a6cd1eb6-7a1b-49c5-be65-8ca8364dfaaf tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451630, 'name': SuspendVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.989311] env[62383]: DEBUG oslo_concurrency.lockutils [None req-afd03a23-e892-4883-b107-70ee40a50e39 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.961s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.033677] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.145170] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451629, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.219012] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80d6b7fb-6e12-4fdd-8972-8730384dfb9a tempest-ServersAaction247Test-1357269920 tempest-ServersAaction247Test-1357269920-project-member] Lock "3f508af0-68a2-4898-b9ae-d84cdb8a4cd9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.212s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.296180] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Releasing lock "refresh_cache-282f2c94-7a63-4eef-aa80-7d67d0a0972a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.296527] env[62383]: DEBUG nova.compute.manager [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Instance network_info: |[{"id": "ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3", "address": "fa:16:3e:ab:e2:99", "network": {"id": "86f13b93-2f69-4ec7-b838-5d8cb11a1051", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1138221332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "304a62370f8149049a797eb7077e910b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7eda19-2d", "ovs_interfaceid": "ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 796.297080] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:e2:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1bf71001-973b-4fda-b804-ee6abcd12776', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 796.308431] env[62383]: DEBUG oslo.service.loopingcall [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 796.312187] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 796.312699] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b35066b3-94ce-4a43-a9d4-9da34ff48d90 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.335791] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 796.335791] env[62383]: value = "task-2451631" [ 796.335791] env[62383]: _type = "Task" [ 796.335791] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.344735] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451631, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.401950] env[62383]: DEBUG oslo_vmware.api [None req-a6cd1eb6-7a1b-49c5-be65-8ca8364dfaaf tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451630, 'name': SuspendVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.537491] env[62383]: DEBUG nova.compute.manager [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62383) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 796.537933] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.646911] env[62383]: DEBUG oslo_vmware.api [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451629, 'name': PowerOnVM_Task, 'duration_secs': 0.731016} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.647396] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 796.653583] env[62383]: DEBUG nova.compute.manager [None req-58dc8be3-0b99-4de4-94a0-487987e38c79 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 796.654461] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c04f8e4-4bdc-4e39-8b51-26301e0cb95a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.757622] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166bd630-da29-4207-9ce0-fed13c397981 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.766490] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d72610e-2946-4d3e-94ad-b7aeb7bc0424 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.799015] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a433d8a-e7d6-421f-b08f-53bcbb0fea91 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.810428] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3da8813-817f-4a80-b946-4331c0951b5b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.824864] env[62383]: DEBUG nova.compute.provider_tree [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.845277] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451631, 'name': CreateVM_Task, 'duration_secs': 0.306083} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.845447] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 796.846135] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.846308] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.846643] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 796.846902] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d7eca04-a5c3-41dc-8736-9c145142d6c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.851752] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 796.851752] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c1c2f9-28ae-1ee3-7088-7a91f2a93b72" [ 796.851752] env[62383]: _type = "Task" [ 796.851752] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.859160] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c1c2f9-28ae-1ee3-7088-7a91f2a93b72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.861912] env[62383]: DEBUG nova.compute.manager [req-40a0dec8-b72b-4069-a9a4-3f0448abf336 req-97a4b5ad-5a1d-4a3c-9d66-b216fad7ec02 service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Received event network-changed-ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 796.861912] env[62383]: DEBUG nova.compute.manager [req-40a0dec8-b72b-4069-a9a4-3f0448abf336 req-97a4b5ad-5a1d-4a3c-9d66-b216fad7ec02 service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Refreshing instance network info cache due to event network-changed-ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 796.861912] env[62383]: DEBUG oslo_concurrency.lockutils [req-40a0dec8-b72b-4069-a9a4-3f0448abf336 req-97a4b5ad-5a1d-4a3c-9d66-b216fad7ec02 service nova] Acquiring lock "refresh_cache-282f2c94-7a63-4eef-aa80-7d67d0a0972a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.861912] env[62383]: DEBUG oslo_concurrency.lockutils [req-40a0dec8-b72b-4069-a9a4-3f0448abf336 req-97a4b5ad-5a1d-4a3c-9d66-b216fad7ec02 service nova] Acquired lock "refresh_cache-282f2c94-7a63-4eef-aa80-7d67d0a0972a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.861912] env[62383]: DEBUG nova.network.neutron [req-40a0dec8-b72b-4069-a9a4-3f0448abf336 req-97a4b5ad-5a1d-4a3c-9d66-b216fad7ec02 service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Refreshing network info cache for port ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 796.902648] env[62383]: DEBUG oslo_vmware.api [None req-a6cd1eb6-7a1b-49c5-be65-8ca8364dfaaf tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451630, 'name': SuspendVM_Task, 'duration_secs': 0.698138} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.902822] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a6cd1eb6-7a1b-49c5-be65-8ca8364dfaaf tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Suspended the VM {{(pid=62383) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 796.903011] env[62383]: DEBUG nova.compute.manager [None req-a6cd1eb6-7a1b-49c5-be65-8ca8364dfaaf tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 796.903813] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae39c97-977e-440b-be0e-71a6b27dff94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.328434] env[62383]: DEBUG nova.scheduler.client.report [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 797.362439] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c1c2f9-28ae-1ee3-7088-7a91f2a93b72, 'name': SearchDatastore_Task, 'duration_secs': 0.00921} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.362846] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.362982] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 797.363212] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.363370] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.363555] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.365774] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-466edc55-b1da-4ae6-9280-b43d39c14575 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.374351] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.374543] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 797.375348] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b27d714-d823-42e3-bd77-c792f45ba34e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.381914] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 797.381914] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524cb1f3-2a59-0506-9d7a-94026c1d917f" [ 797.381914] env[62383]: _type = "Task" [ 797.381914] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.391333] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524cb1f3-2a59-0506-9d7a-94026c1d917f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.726290] env[62383]: DEBUG nova.network.neutron [req-40a0dec8-b72b-4069-a9a4-3f0448abf336 req-97a4b5ad-5a1d-4a3c-9d66-b216fad7ec02 service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Updated VIF entry in instance network info cache for port ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 797.726774] env[62383]: DEBUG nova.network.neutron [req-40a0dec8-b72b-4069-a9a4-3f0448abf336 req-97a4b5ad-5a1d-4a3c-9d66-b216fad7ec02 service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Updating instance_info_cache with network_info: [{"id": "ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3", "address": "fa:16:3e:ab:e2:99", "network": {"id": "86f13b93-2f69-4ec7-b838-5d8cb11a1051", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1138221332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "304a62370f8149049a797eb7077e910b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba7eda19-2d", "ovs_interfaceid": "ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.834247] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.147s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.837351] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 43.515s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.879281] env[62383]: INFO nova.scheduler.client.report [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Deleted allocations for instance 563840a8-8fa7-4bfa-9912-933c14e7076a [ 797.892373] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524cb1f3-2a59-0506-9d7a-94026c1d917f, 'name': SearchDatastore_Task, 'duration_secs': 0.030124} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.893438] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2031cc10-a34d-422a-b52f-866dbdc61f62 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.898589] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 797.898589] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]529e5b00-bd69-d322-0609-aa39907118b2" [ 797.898589] env[62383]: _type = "Task" [ 797.898589] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.906183] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529e5b00-bd69-d322-0609-aa39907118b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.229507] env[62383]: DEBUG oslo_concurrency.lockutils [req-40a0dec8-b72b-4069-a9a4-3f0448abf336 req-97a4b5ad-5a1d-4a3c-9d66-b216fad7ec02 service nova] Releasing lock "refresh_cache-282f2c94-7a63-4eef-aa80-7d67d0a0972a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.341402] env[62383]: INFO nova.compute.claims [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.389314] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62757fbc-a67b-496e-84e2-73fef221957d tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "563840a8-8fa7-4bfa-9912-933c14e7076a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.709s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.411372] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529e5b00-bd69-d322-0609-aa39907118b2, 'name': SearchDatastore_Task, 'duration_secs': 0.024886} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.412558] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.412558] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 282f2c94-7a63-4eef-aa80-7d67d0a0972a/282f2c94-7a63-4eef-aa80-7d67d0a0972a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 798.414794] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb277787-2125-414d-b574-3face21d58eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.420282] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 798.420282] env[62383]: value = "task-2451632" [ 798.420282] env[62383]: _type = "Task" [ 798.420282] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.429636] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451632, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.603449] env[62383]: INFO nova.compute.manager [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Unrescuing [ 798.603449] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.603449] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquired lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.603449] env[62383]: DEBUG nova.network.neutron [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 798.850339] env[62383]: INFO nova.compute.resource_tracker [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating resource usage from migration de2bbf7b-fb36-4da7-9a39-76edd8e5241b [ 798.934396] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451632, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503858} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.937090] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 282f2c94-7a63-4eef-aa80-7d67d0a0972a/282f2c94-7a63-4eef-aa80-7d67d0a0972a.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 798.937324] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 798.938551] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2621cfe-168d-4a0b-8fdc-bbdc8e840d0f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.944645] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 798.944645] env[62383]: value = "task-2451633" [ 798.944645] env[62383]: _type = "Task" [ 798.944645] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.961376] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451633, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.393496] env[62383]: DEBUG nova.network.neutron [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Updating instance_info_cache with network_info: [{"id": "a6625886-1cc1-4c4d-bd08-6b5221d4a2c2", "address": "fa:16:3e:2b:e9:9a", "network": {"id": "c1ceac9a-f4dd-41e3-9156-9fca4c3727b3", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1636786674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a657c912fef04c5ca8c0b5d96a8a3064", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5efce30e-48dd-493a-a354-f562a8adf7af", "external-id": "nsx-vlan-transportzone-283", "segmentation_id": 283, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6625886-1c", "ovs_interfaceid": "a6625886-1cc1-4c4d-bd08-6b5221d4a2c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.457115] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451633, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068572} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.457115] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 799.457955] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726c7946-cf27-48f7-b11e-a231bceb6320 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.461239] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e4e1cf-1993-4b03-8eee-b9595a1adf8b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.481594] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486d40c4-a22c-41f7-8820-5d369ff4e535 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.494069] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 282f2c94-7a63-4eef-aa80-7d67d0a0972a/282f2c94-7a63-4eef-aa80-7d67d0a0972a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 799.494400] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3ee5904-ee21-4e12-94e1-eabd73095373 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.544119] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b07579-e1b6-44f9-88c2-b26b9b0ff7ce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.553024] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 799.553024] env[62383]: value = "task-2451634" [ 799.553024] env[62383]: _type = "Task" [ 799.553024] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.558343] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66d694e-fdb8-4358-bbc9-e26ce8ae8a20 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.565750] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451634, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.576441] env[62383]: DEBUG nova.compute.provider_tree [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.853361] env[62383]: DEBUG nova.compute.manager [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 799.854313] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c8bfd3-2d37-4cb9-853e-c49807d7a2f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.899601] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Releasing lock "refresh_cache-a27fcace-4fb3-48fb-946d-b8057f6ee601" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.899742] env[62383]: DEBUG nova.objects.instance [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lazy-loading 'flavor' on Instance uuid a27fcace-4fb3-48fb-946d-b8057f6ee601 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 800.061127] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451634, 'name': ReconfigVM_Task, 'duration_secs': 0.284219} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.061437] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 282f2c94-7a63-4eef-aa80-7d67d0a0972a/282f2c94-7a63-4eef-aa80-7d67d0a0972a.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 800.062068] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4380573-9501-49a2-a092-3662ca1e0eea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.068503] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 800.068503] env[62383]: value = "task-2451635" [ 800.068503] env[62383]: _type = "Task" [ 800.068503] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.076217] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451635, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.082041] env[62383]: DEBUG nova.scheduler.client.report [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 800.370375] env[62383]: INFO nova.compute.manager [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] instance snapshotting [ 800.370573] env[62383]: WARNING nova.compute.manager [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 800.373501] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9916d45e-f72c-42cc-8438-9c0f7291c110 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.401409] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411a6f7d-ff52-412d-a616-632c758267a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.406962] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8662dd77-5782-439b-a1c2-d802aab2ee7f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.437734] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "2eba2920-7912-475b-a198-890743aa5255" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.437869] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "2eba2920-7912-475b-a198-890743aa5255" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.439104] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 800.439681] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-abef3c23-3d02-4b05-9768-2ff76c388cce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.446133] env[62383]: DEBUG oslo_vmware.api [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 800.446133] env[62383]: value = "task-2451636" [ 800.446133] env[62383]: _type = "Task" [ 800.446133] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.455937] env[62383]: DEBUG oslo_vmware.api [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451636, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.578243] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451635, 'name': Rename_Task, 'duration_secs': 0.135917} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.578516] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 800.578781] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51b0d12d-037a-4e21-a2a2-14e67be8ce11 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.585574] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.749s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.585768] env[62383]: INFO nova.compute.manager [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Migrating [ 800.595344] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 42.848s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.595527] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.595678] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 800.595969] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.736s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.597541] env[62383]: INFO nova.compute.claims [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.602324] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 800.602324] env[62383]: value = "task-2451637" [ 800.602324] env[62383]: _type = "Task" [ 800.602324] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.603983] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53bf7f30-eeb9-4ef3-af77-606f12fedb65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.624111] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4c88b1-2825-4d1a-9667-35107aeb90c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.629648] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451637, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.642806] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b863d6a-21ce-405b-9e6c-fab9bb1f5b4a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.651205] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec202e1-7d58-4f75-81c4-56ef6bcbef68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.684780] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178215MB free_disk=144GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 800.685016] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.921311] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 800.921613] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e9722d27-4638-4023-b5fe-5b985f5eb281 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.933082] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 800.933082] env[62383]: value = "task-2451638" [ 800.933082] env[62383]: _type = "Task" [ 800.933082] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.940958] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451638, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.942713] env[62383]: DEBUG nova.compute.manager [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 800.955647] env[62383]: DEBUG oslo_vmware.api [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451636, 'name': PowerOffVM_Task, 'duration_secs': 0.198927} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.955647] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 800.960135] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Reconfiguring VM instance instance-0000003a to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 800.960421] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acf0a124-a9d1-4ae3-9b74-8c6c10008470 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.978771] env[62383]: DEBUG oslo_vmware.api [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 800.978771] env[62383]: value = "task-2451639" [ 800.978771] env[62383]: _type = "Task" [ 800.978771] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.987811] env[62383]: DEBUG oslo_vmware.api [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451639, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.116297] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.116419] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.116682] env[62383]: DEBUG nova.network.neutron [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.121217] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451637, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.445876] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451638, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.468339] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.490059] env[62383]: DEBUG oslo_vmware.api [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451639, 'name': ReconfigVM_Task, 'duration_secs': 0.242168} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.491597] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Reconfigured VM instance instance-0000003a to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 801.491597] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 801.491597] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46b434e8-da6d-42cf-9abf-0cdeb53253ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.498083] env[62383]: DEBUG oslo_vmware.api [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 801.498083] env[62383]: value = "task-2451640" [ 801.498083] env[62383]: _type = "Task" [ 801.498083] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.506977] env[62383]: DEBUG oslo_vmware.api [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.620609] env[62383]: DEBUG oslo_vmware.api [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451637, 'name': PowerOnVM_Task, 'duration_secs': 1.028714} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.621469] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 801.621469] env[62383]: INFO nova.compute.manager [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Took 7.88 seconds to spawn the instance on the hypervisor. 
[ 801.621588] env[62383]: DEBUG nova.compute.manager [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 801.625382] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9f8384-d11d-48fd-8d41-f4fc553b084b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.893073] env[62383]: DEBUG nova.network.neutron [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance_info_cache with network_info: [{"id": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "address": "fa:16:3e:ab:2f:e4", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925071ab-96", "ovs_interfaceid": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.947579] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451638, 'name': CreateSnapshot_Task, 'duration_secs': 0.601256} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.951070] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 801.952661] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b57b9e-0945-419a-a6ec-619af16e4d48 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.010297] env[62383]: DEBUG oslo_vmware.api [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451640, 'name': PowerOnVM_Task, 'duration_secs': 0.399261} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.013773] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 802.013773] env[62383]: DEBUG nova.compute.manager [None req-f01f6e11-5ef7-48e2-bf37-595be2a5a2f1 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 802.014900] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c800ad6-7451-4cc1-a5c8-c5773ded9223 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.150606] env[62383]: INFO nova.compute.manager [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Took 57.45 seconds to build instance. 
[ 802.244259] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquiring lock "a04a6a53-cca8-4e15-b840-cb1394e5b188" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.244259] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lock "a04a6a53-cca8-4e15-b840-cb1394e5b188" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.248548] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004f6164-eafe-4531-890e-2d996331297a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.255895] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad37130-1d69-43f4-a052-972e90246e70 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.287846] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6a8ca3-5f70-4cec-85aa-966ab58d05f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.295484] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d8b029-bf9d-40a7-8ece-e2862b7f302b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.311530] env[62383]: DEBUG nova.compute.provider_tree [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.400260] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.474912] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 802.475237] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cfe96bb0-52b9-40fd-bc5f-fe871c18f4ab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.483574] env[62383]: DEBUG oslo_vmware.api [None 
req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 802.483574] env[62383]: value = "task-2451641" [ 802.483574] env[62383]: _type = "Task" [ 802.483574] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.491943] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451641, 'name': CloneVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.652524] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6e991ce0-3357-4bad-9a42-13f8d42537e7 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.370s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.713020] env[62383]: DEBUG nova.compute.manager [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 802.819170] env[62383]: DEBUG nova.scheduler.client.report [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.000624] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451641, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.238012] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.326638] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.730s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.327423] env[62383]: DEBUG nova.compute.manager [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 803.331519] env[62383]: DEBUG oslo_concurrency.lockutils [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 44.003s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.331777] env[62383]: DEBUG nova.objects.instance [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 803.496470] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451641, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.504021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "a27fcace-4fb3-48fb-946d-b8057f6ee601" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.504021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "a27fcace-4fb3-48fb-946d-b8057f6ee601" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.504021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "a27fcace-4fb3-48fb-946d-b8057f6ee601-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.504021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "a27fcace-4fb3-48fb-946d-b8057f6ee601-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.504247] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "a27fcace-4fb3-48fb-946d-b8057f6ee601-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.504936] env[62383]: INFO nova.compute.manager [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Terminating instance [ 803.837539] env[62383]: DEBUG nova.compute.utils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 803.842127] env[62383]: DEBUG nova.compute.manager [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 803.842325] env[62383]: DEBUG nova.network.neutron [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 803.886950] env[62383]: DEBUG nova.policy [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e94f486c637c4b9f8c3cfa649688a603', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e320302a6b1e466e887c787006413dec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 803.915542] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7ca271-50f8-4a7e-8ca6-f28b5da95409 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.934500] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance '8a165d96-f503-4bc5-bff4-e6a85201e137' progress to 0 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 803.996142] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451641, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.009444] env[62383]: DEBUG nova.compute.manager [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 804.009742] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 804.010511] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd2dc33-4eac-462d-bf79-21c013d7d4f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.017764] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.018108] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e482e081-9acc-4c4f-9236-bbf374f5e302 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.024395] env[62383]: DEBUG oslo_vmware.api [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 804.024395] env[62383]: value = "task-2451642" [ 804.024395] env[62383]: _type = "Task" [ 804.024395] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.032320] env[62383]: DEBUG oslo_vmware.api [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451642, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.168160] env[62383]: DEBUG nova.network.neutron [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Successfully created port: 5ba29557-a079-4404-9449-eeff24a0a3e4 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.346019] env[62383]: DEBUG nova.compute.manager [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 804.348341] env[62383]: DEBUG oslo_concurrency.lockutils [None req-904ac40a-4ba5-4d92-8dcf-ca19460c5e2e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.351862] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.907s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.352532] env[62383]: DEBUG nova.objects.instance [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lazy-loading 'resources' on Instance uuid a10f5b03-c45b-4cc2-923f-3227665d236c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 804.440516] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.441228] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae9a6780-84e9-45e2-81d9-b004247529ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.449607] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 804.449607] env[62383]: value = "task-2451643" [ 804.449607] env[62383]: _type = "Task" [ 804.449607] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.458431] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451643, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.495305] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451641, 'name': CloneVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.534102] env[62383]: DEBUG oslo_vmware.api [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451642, 'name': PowerOffVM_Task, 'duration_secs': 0.357147} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.534386] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 804.534649] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 804.534988] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d8bfe3b-5ab2-459d-ae66-cfecab28e3f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.594905] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 804.595159] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 804.595349] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Deleting the datastore file [datastore1] a27fcace-4fb3-48fb-946d-b8057f6ee601 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 804.595624] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a0de2eb-d5cf-4fb2-a03d-b1b0d3d45685 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.601961] env[62383]: DEBUG oslo_vmware.api [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 804.601961] env[62383]: value = "task-2451645" [ 804.601961] env[62383]: _type = "Task" [ 804.601961] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.609618] env[62383]: DEBUG oslo_vmware.api [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451645, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.961942] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 804.962211] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance '8a165d96-f503-4bc5-bff4-e6a85201e137' progress to 17 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 804.998875] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451641, 'name': CloneVM_Task, 'duration_secs': 2.046704} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.001332] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Created linked-clone VM from snapshot [ 805.002267] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3a51fa-6e4c-4091-a9ad-4c0585981fce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.009429] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Uploading image ee3729ee-5379-458f-8542-4fd2440f6cf5 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 805.036479] env[62383]: DEBUG oslo_vmware.rw_handles [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 805.036479] env[62383]: value = "vm-496478" [ 805.036479] env[62383]: _type = "VirtualMachine" [ 805.036479] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 805.036786] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-db62edb5-4d50-4d3a-ab9a-3b0f1264cc3a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.046216] env[62383]: DEBUG oslo_vmware.rw_handles [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lease: (returnval){ [ 805.046216] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ab2c35-28c3-32a5-e18c-0b8348edb0a2" [ 805.046216] env[62383]: _type = "HttpNfcLease" [ 805.046216] env[62383]: } obtained for exporting VM: (result){ [ 805.046216] env[62383]: value = "vm-496478" [ 805.046216] env[62383]: _type = "VirtualMachine" [ 805.046216] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 805.046557] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the lease: (returnval){ [ 805.046557] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ab2c35-28c3-32a5-e18c-0b8348edb0a2" [ 805.046557] env[62383]: _type = "HttpNfcLease" [ 805.046557] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 805.053961] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 805.053961] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ab2c35-28c3-32a5-e18c-0b8348edb0a2" [ 805.053961] env[62383]: _type = "HttpNfcLease" [ 805.053961] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 805.113540] env[62383]: DEBUG oslo_vmware.api [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143113} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.113863] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.114158] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.114365] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.114665] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.114869] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.116355] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 805.116548] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 805.116744] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 805.117030] env[62383]: INFO nova.compute.manager [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Took 1.11 seconds to destroy the instance on the hypervisor. [ 805.117179] env[62383]: DEBUG oslo.service.loopingcall [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 805.117590] env[62383]: INFO nova.compute.manager [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Terminating instance [ 805.120772] env[62383]: DEBUG nova.compute.manager [-] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 805.120870] env[62383]: DEBUG nova.network.neutron [-] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 805.303268] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc5ac88-cbd1-449c-bcf4-b2dfb021a6f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.311020] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a4fd0d-4676-4c75-8acc-cc86571be1cb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.346527] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88904f1-6a08-4494-a1ab-69d7d11adc49 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.355236] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032678ff-9da8-4bc8-a51a-4a24d8f0fef8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.362351] env[62383]: DEBUG nova.compute.manager [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 805.373939] env[62383]: DEBUG nova.compute.provider_tree [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.398307] env[62383]: DEBUG nova.virt.hardware [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 805.398541] env[62383]: DEBUG nova.virt.hardware [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.398715] env[62383]: DEBUG nova.virt.hardware [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 805.398912] env[62383]: DEBUG nova.virt.hardware [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.399075] env[62383]: DEBUG nova.virt.hardware [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 805.399222] env[62383]: DEBUG nova.virt.hardware [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 805.399428] env[62383]: DEBUG nova.virt.hardware [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 805.399588] env[62383]: DEBUG nova.virt.hardware [None 
req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 805.399784] env[62383]: DEBUG nova.virt.hardware [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 805.400153] env[62383]: DEBUG nova.virt.hardware [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 805.400153] env[62383]: DEBUG nova.virt.hardware [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 805.400981] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4659ec56-86c5-4b05-a335-09bdeb484e65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.409360] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048e1b63-4cf5-41e8-84cf-3c79db4950c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.463354] env[62383]: DEBUG nova.compute.manager [req-a969656f-a29e-43ae-9ae0-cd54a3c2f54f req-0d814cad-477c-446a-8647-0d8ca0948b64 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Received event network-vif-deleted-a6625886-1cc1-4c4d-bd08-6b5221d4a2c2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 805.463850] env[62383]: INFO nova.compute.manager [req-a969656f-a29e-43ae-9ae0-cd54a3c2f54f req-0d814cad-477c-446a-8647-0d8ca0948b64 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Neutron deleted interface a6625886-1cc1-4c4d-bd08-6b5221d4a2c2; detaching it from the instance and deleting it from the info cache [ 805.463850] env[62383]: DEBUG nova.network.neutron [req-a969656f-a29e-43ae-9ae0-cd54a3c2f54f req-0d814cad-477c-446a-8647-0d8ca0948b64 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.469340] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 805.469340] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.469340] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 805.469469] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.469642] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 805.469834] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 805.470094] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 805.470303] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 805.470510] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 805.470715] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 805.470932] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 
tempest-ServerActionsTestOtherB-255557430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 805.476894] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d165156-3c16-447f-8494-6662bbe4c9a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.494286] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 805.494286] env[62383]: value = "task-2451647" [ 805.494286] env[62383]: _type = "Task" [ 805.494286] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.504425] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451647, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.554985] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 805.554985] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ab2c35-28c3-32a5-e18c-0b8348edb0a2" [ 805.554985] env[62383]: _type = "HttpNfcLease" [ 805.554985] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 805.555294] env[62383]: DEBUG oslo_vmware.rw_handles [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 805.555294] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ab2c35-28c3-32a5-e18c-0b8348edb0a2" [ 805.555294] env[62383]: _type = "HttpNfcLease" [ 805.555294] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 805.556026] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1e04f5-8dfb-4eff-b23e-f89e92ab3500 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.564162] env[62383]: DEBUG oslo_vmware.rw_handles [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52106899-3b06-4d7f-6364-1ce729aaf31d/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 805.564356] env[62383]: DEBUG oslo_vmware.rw_handles [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52106899-3b06-4d7f-6364-1ce729aaf31d/disk-0.vmdk for reading. 
{{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 805.637805] env[62383]: DEBUG nova.compute.manager [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 805.638128] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 805.639397] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b5d5a0-abbf-417f-9ab5-16c1dd7ac1da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.647965] env[62383]: DEBUG nova.compute.manager [req-c53fdc80-0047-4503-9fc0-7a4d77b852ad req-73a0e80e-7de7-479e-9834-79aa74fd65f9 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Received event network-vif-plugged-5ba29557-a079-4404-9449-eeff24a0a3e4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 805.648449] env[62383]: DEBUG oslo_concurrency.lockutils [req-c53fdc80-0047-4503-9fc0-7a4d77b852ad req-73a0e80e-7de7-479e-9834-79aa74fd65f9 service nova] Acquiring lock "2f028680-8db4-474a-8f24-880c4702877b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.648565] env[62383]: DEBUG oslo_concurrency.lockutils [req-c53fdc80-0047-4503-9fc0-7a4d77b852ad req-73a0e80e-7de7-479e-9834-79aa74fd65f9 service nova] Lock "2f028680-8db4-474a-8f24-880c4702877b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.648790] env[62383]: DEBUG oslo_concurrency.lockutils [req-c53fdc80-0047-4503-9fc0-7a4d77b852ad req-73a0e80e-7de7-479e-9834-79aa74fd65f9 service nova] Lock "2f028680-8db4-474a-8f24-880c4702877b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.649017] env[62383]: DEBUG nova.compute.manager [req-c53fdc80-0047-4503-9fc0-7a4d77b852ad req-73a0e80e-7de7-479e-9834-79aa74fd65f9 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] No waiting events found dispatching network-vif-plugged-5ba29557-a079-4404-9449-eeff24a0a3e4 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 805.649244] env[62383]: WARNING nova.compute.manager [req-c53fdc80-0047-4503-9fc0-7a4d77b852ad req-73a0e80e-7de7-479e-9834-79aa74fd65f9 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Received unexpected event network-vif-plugged-5ba29557-a079-4404-9449-eeff24a0a3e4 for instance with vm_state building and task_state spawning. 
[ 805.656237] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 805.656540] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e67b6bf-5f7b-4e6b-a9e1-8640dc9e947b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.664585] env[62383]: DEBUG oslo_vmware.api [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 805.664585] env[62383]: value = "task-2451648" [ 805.664585] env[62383]: _type = "Task" [ 805.664585] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.675638] env[62383]: DEBUG oslo_vmware.api [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451648, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.691772] env[62383]: DEBUG nova.network.neutron [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Successfully updated port: 5ba29557-a079-4404-9449-eeff24a0a3e4 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 805.696529] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-107c3798-0319-4a59-8a2b-7396b1ce3880 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.877709] env[62383]: DEBUG nova.scheduler.client.report [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 805.943565] env[62383]: DEBUG nova.network.neutron [-] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.967403] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c729e050-c85b-434c-92c2-c2ddcaf7c943 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.980129] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661f7cf7-2e92-4662-9314-85d3214d8bc1 {{(pid=62383) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.010751] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451647, 'name': ReconfigVM_Task, 'duration_secs': 0.17356} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.011262] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance '8a165d96-f503-4bc5-bff4-e6a85201e137' progress to 33 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 806.049526] env[62383]: DEBUG nova.compute.manager [req-a969656f-a29e-43ae-9ae0-cd54a3c2f54f req-0d814cad-477c-446a-8647-0d8ca0948b64 service nova] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Detach interface failed, port_id=a6625886-1cc1-4c4d-bd08-6b5221d4a2c2, reason: Instance a27fcace-4fb3-48fb-946d-b8057f6ee601 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 806.176752] env[62383]: DEBUG oslo_vmware.api [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451648, 'name': PowerOffVM_Task, 'duration_secs': 0.186353} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.177102] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 806.177342] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 806.177698] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ecb7752-c0f3-41cd-b62f-b355aea908b0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.200879] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.201145] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.201367] env[62383]: DEBUG nova.network.neutron [None 
req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.242951] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 806.243604] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 806.244056] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Deleting the datastore file [datastore2] 282f2c94-7a63-4eef-aa80-7d67d0a0972a {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 806.244477] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79b048eb-0af3-4f5a-9910-2bf2cbc23be5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.252054] env[62383]: DEBUG oslo_vmware.api [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 806.252054] env[62383]: value = "task-2451650" [ 806.252054] env[62383]: _type = "Task" [ 806.252054] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.262271] env[62383]: DEBUG oslo_vmware.api [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451650, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.383049] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.031s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.384975] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.447s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.386885] env[62383]: INFO nova.compute.claims [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 806.414356] env[62383]: INFO nova.scheduler.client.report [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleted allocations for instance a10f5b03-c45b-4cc2-923f-3227665d236c [ 806.449794] env[62383]: INFO nova.compute.manager [-] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Took 1.33 seconds to deallocate network for instance. [ 806.554802] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 806.555474] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 806.555820] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 806.556179] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 806.556452] env[62383]: DEBUG 
nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 806.556764] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 806.557157] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 806.557489] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 806.557787] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 806.558182] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 806.558587] env[62383]: DEBUG nova.virt.hardware [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 806.568773] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Reconfiguring VM instance instance-0000000c to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 806.569681] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbb5e6d2-9e13-4c63-aa41-5ae04b411f6f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.598032] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 806.598032] env[62383]: value = "task-2451651" [ 806.598032] env[62383]: _type = "Task" [ 806.598032] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.606927] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451651, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.733734] env[62383]: DEBUG nova.network.neutron [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.762872] env[62383]: DEBUG oslo_vmware.api [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188985} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.763641] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 806.763897] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 806.764495] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 806.764703] env[62383]: INFO nova.compute.manager [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 806.765043] env[62383]: DEBUG oslo.service.loopingcall [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.765310] env[62383]: DEBUG nova.compute.manager [-] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 806.765439] env[62383]: DEBUG nova.network.neutron [-] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 806.890838] env[62383]: DEBUG nova.network.neutron [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [{"id": "5ba29557-a079-4404-9449-eeff24a0a3e4", "address": "fa:16:3e:fd:2c:1c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ba29557-a0", "ovs_interfaceid": "5ba29557-a079-4404-9449-eeff24a0a3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.924562] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6eb4c102-2c9f-42c3-842b-81400075fd62 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "a10f5b03-c45b-4cc2-923f-3227665d236c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.958932] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.115014] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451651, 'name': ReconfigVM_Task, 'duration_secs': 0.170153} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.116078] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Reconfigured VM instance instance-0000000c to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 807.116636] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f149215c-2441-4965-bb24-2c9a540a4eb0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.142676] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 8a165d96-f503-4bc5-bff4-e6a85201e137/8a165d96-f503-4bc5-bff4-e6a85201e137.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 807.143080] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-021043ea-2efb-4624-b946-2a8040439113 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.166626] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 807.166626] env[62383]: value = "task-2451652" [ 807.166626] env[62383]: _type = "Task" [ 807.166626] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.177026] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451652, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.395701] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.396076] env[62383]: DEBUG nova.compute.manager [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Instance network_info: |[{"id": "5ba29557-a079-4404-9449-eeff24a0a3e4", "address": "fa:16:3e:fd:2c:1c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ba29557-a0", "ovs_interfaceid": "5ba29557-a079-4404-9449-eeff24a0a3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 807.396523] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:2c:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '63e45f61-1d9b-4660-8d25-89fb68d45cd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ba29557-a079-4404-9449-eeff24a0a3e4', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.404497] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Creating folder: Project (e320302a6b1e466e887c787006413dec). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 807.407575] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9de53570-d3b0-4162-91bf-9f597dfcc489 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.418868] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Created folder: Project (e320302a6b1e466e887c787006413dec) in parent group-v496304. 
[ 807.419086] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Creating folder: Instances. Parent ref: group-v496479. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 807.419334] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08580b72-afdf-4ad3-a1a1-8a392a946b94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.428126] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Created folder: Instances in parent group-v496479. [ 807.428722] env[62383]: DEBUG oslo.service.loopingcall [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 807.428722] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 807.428917] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d8e5e59-45cc-4414-ac8f-e5f8bb501ea2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.451550] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.451550] env[62383]: value = "task-2451655" [ 807.451550] env[62383]: _type = "Task" [ 807.451550] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.459435] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451655, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.516228] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.516580] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.516887] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.517150] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.517363] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "2337e9a2-736c-4d58-ac2e-04c8ad813be4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.519984] env[62383]: INFO nova.compute.manager [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Terminating instance [ 807.556507] env[62383]: DEBUG nova.compute.manager [req-3399ef64-e060-4b33-a477-d49452b2c34e req-7825b8cb-b6b2-450a-b41b-e6ee8001637b service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Received event network-vif-deleted-ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 807.556759] env[62383]: INFO nova.compute.manager [req-3399ef64-e060-4b33-a477-d49452b2c34e req-7825b8cb-b6b2-450a-b41b-e6ee8001637b service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Neutron deleted interface ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3; detaching it from the instance and deleting it from the info cache [ 807.556960] env[62383]: DEBUG nova.network.neutron [req-3399ef64-e060-4b33-a477-d49452b2c34e req-7825b8cb-b6b2-450a-b41b-e6ee8001637b service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Updating 
instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.663019] env[62383]: DEBUG nova.network.neutron [-] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.674138] env[62383]: DEBUG nova.compute.manager [req-b0e1466d-f02e-4da6-8ab3-8ee7636acfbc req-464d4a14-463e-4043-96a2-582c4b3a0758 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Received event network-changed-5ba29557-a079-4404-9449-eeff24a0a3e4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 807.674394] env[62383]: DEBUG nova.compute.manager [req-b0e1466d-f02e-4da6-8ab3-8ee7636acfbc req-464d4a14-463e-4043-96a2-582c4b3a0758 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Refreshing instance network info cache due to event network-changed-5ba29557-a079-4404-9449-eeff24a0a3e4. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 807.674651] env[62383]: DEBUG oslo_concurrency.lockutils [req-b0e1466d-f02e-4da6-8ab3-8ee7636acfbc req-464d4a14-463e-4043-96a2-582c4b3a0758 service nova] Acquiring lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.674749] env[62383]: DEBUG oslo_concurrency.lockutils [req-b0e1466d-f02e-4da6-8ab3-8ee7636acfbc req-464d4a14-463e-4043-96a2-582c4b3a0758 service nova] Acquired lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.674992] env[62383]: DEBUG nova.network.neutron [req-b0e1466d-f02e-4da6-8ab3-8ee7636acfbc req-464d4a14-463e-4043-96a2-582c4b3a0758 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Refreshing network info cache for port 5ba29557-a079-4404-9449-eeff24a0a3e4 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.682477] env[62383]: DEBUG oslo_vmware.api [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451652, 'name': ReconfigVM_Task, 'duration_secs': 0.359915} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.685371] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 8a165d96-f503-4bc5-bff4-e6a85201e137/8a165d96-f503-4bc5-bff4-e6a85201e137.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 807.685690] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance '8a165d96-f503-4bc5-bff4-e6a85201e137' progress to 50 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 807.956019] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11ae67b-8036-43ef-9632-fb97bd4a6458 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.969352] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a52d692-acc7-454e-9f44-6408beb65eb4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.972932] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451655, 'name': CreateVM_Task, 'duration_secs': 0.372866} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.973119] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 807.974708] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.974882] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.975251] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 807.975505] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d80ec47c-ef53-4ce8-bbc2-c8e4c5f879c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.005650] env[62383]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfde78f-2489-419a-bb5c-239f87434638 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.010072] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 808.010072] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5223ea3f-0250-8328-1b03-a17139519617" [ 808.010072] env[62383]: _type = "Task" [ 808.010072] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.018475] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1882a9bf-4bcd-4208-8bae-391bda2afde4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.025539] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5223ea3f-0250-8328-1b03-a17139519617, 'name': SearchDatastore_Task, 'duration_secs': 0.010705} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.026124] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.026361] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.026596] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.026866] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.027216] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 808.027825] env[62383]: DEBUG nova.compute.manager [None 
req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 808.028024] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 808.028255] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b3acf9d-4fa6-4106-b36a-a3144bbc1938 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.038495] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb8907a-d577-48a2-8e3f-8f80f7d34b09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.041457] env[62383]: DEBUG nova.compute.provider_tree [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.048115] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 808.049272] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a6069b7-0d22-468d-a276-41935cc79149 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.050942] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 808.051130] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 808.054043] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d23d40f-7fe5-4a3e-8818-82faaf8c8eb6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.056210] env[62383]: DEBUG nova.network.neutron [req-b0e1466d-f02e-4da6-8ab3-8ee7636acfbc req-464d4a14-463e-4043-96a2-582c4b3a0758 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updated VIF entry in instance network info cache for port 5ba29557-a079-4404-9449-eeff24a0a3e4. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.056610] env[62383]: DEBUG nova.network.neutron [req-b0e1466d-f02e-4da6-8ab3-8ee7636acfbc req-464d4a14-463e-4043-96a2-582c4b3a0758 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [{"id": "5ba29557-a079-4404-9449-eeff24a0a3e4", "address": "fa:16:3e:fd:2c:1c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ba29557-a0", "ovs_interfaceid": "5ba29557-a079-4404-9449-eeff24a0a3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.059171] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e4ae9825-3f74-42e9-aaa9-325a4426d3e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.062906] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 808.062906] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b27495-be93-403a-c4b9-b76920f46e5e" [ 808.062906] env[62383]: _type = "Task" [ 808.062906] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.064636] env[62383]: DEBUG oslo_vmware.api [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 808.064636] env[62383]: value = "task-2451656" [ 808.064636] env[62383]: _type = "Task" [ 808.064636] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.078207] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9f4b4b-f533-4299-9f95-d6ea32e2f392 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.097019] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b27495-be93-403a-c4b9-b76920f46e5e, 'name': SearchDatastore_Task, 'duration_secs': 0.011996} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.097817] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-086920ac-2231-4ffc-96e0-051a8b28e08b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.100451] env[62383]: DEBUG oslo_vmware.api [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.103511] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 808.103511] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d90669-aca4-587f-1fcf-9b5b672306b4" [ 808.103511] env[62383]: _type = "Task" [ 808.103511] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.123407] env[62383]: DEBUG nova.compute.manager [req-3399ef64-e060-4b33-a477-d49452b2c34e req-7825b8cb-b6b2-450a-b41b-e6ee8001637b service nova] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Detach interface failed, port_id=ba7eda19-2d6b-4e07-b357-ece0cfeaa5a3, reason: Instance 282f2c94-7a63-4eef-aa80-7d67d0a0972a could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 808.129438] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d90669-aca4-587f-1fcf-9b5b672306b4, 'name': SearchDatastore_Task, 'duration_secs': 0.010951} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.129697] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.129962] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 2f028680-8db4-474a-8f24-880c4702877b/2f028680-8db4-474a-8f24-880c4702877b.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 808.130245] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7020b857-3235-46ec-b54f-347912adc66c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.137087] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 808.137087] env[62383]: value = "task-2451657" [ 808.137087] env[62383]: _type = "Task" [ 808.137087] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.146640] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451657, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.165305] env[62383]: INFO nova.compute.manager [-] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Took 1.40 seconds to deallocate network for instance. 
[ 808.193397] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b4b451-3271-4cb7-9149-9b3b0415e6b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.213296] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed01f52-72c0-48a5-8a2b-1ea9f6e2f41b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.232218] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance '8a165d96-f503-4bc5-bff4-e6a85201e137' progress to 67 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 808.545370] env[62383]: DEBUG nova.scheduler.client.report [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 808.559881] env[62383]: DEBUG oslo_concurrency.lockutils [req-b0e1466d-f02e-4da6-8ab3-8ee7636acfbc req-464d4a14-463e-4043-96a2-582c4b3a0758 service nova] Releasing lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.580141] env[62383]: DEBUG oslo_vmware.api [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451656, 'name': PowerOffVM_Task, 'duration_secs': 0.205763} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.580305] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 808.580464] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 808.580670] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ecb09c12-492d-4817-85cf-3ea41bbadbeb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.648199] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451657, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.662436] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 808.662436] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 808.662634] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleting the datastore file [datastore1] 2337e9a2-736c-4d58-ac2e-04c8ad813be4 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 808.662847] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9293ccd-e3f5-4ab4-b4bf-b26c47ab2d73 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.669315] env[62383]: DEBUG oslo_vmware.api [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 808.669315] env[62383]: value = "task-2451659" [ 808.669315] env[62383]: _type = "Task" [ 808.669315] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.673473] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.678865] env[62383]: DEBUG oslo_vmware.api [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451659, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.775106] env[62383]: DEBUG nova.network.neutron [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Port 925071ab-96dd-4c80-901e-9dba6c4a5a9c binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 809.050923] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.052021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 33.004s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.149061] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451657, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536974} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.149061] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 2f028680-8db4-474a-8f24-880c4702877b/2f028680-8db4-474a-8f24-880c4702877b.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 809.149454] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 809.149454] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8948b7b5-ebea-4233-930b-9ddd28de9353 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.156578] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 809.156578] env[62383]: value = "task-2451660" [ 809.156578] env[62383]: _type = "Task" [ 809.156578] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.164477] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451660, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.179990] env[62383]: DEBUG oslo_vmware.api [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.230947} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.180322] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 809.180536] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 809.180732] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 809.180943] env[62383]: INFO nova.compute.manager [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Took 1.15 seconds to destroy the instance on the hypervisor. [ 809.181252] env[62383]: DEBUG oslo.service.loopingcall [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 809.181477] env[62383]: DEBUG nova.compute.manager [-] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 809.181574] env[62383]: DEBUG nova.network.neutron [-] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 809.557259] env[62383]: INFO nova.compute.claims [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 809.564414] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquiring lock "fad7a4a1-b9ff-4f71-b61d-a401df02fab3" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.564414] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "fad7a4a1-b9ff-4f71-b61d-a401df02fab3" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
809.581666] env[62383]: DEBUG nova.compute.manager [req-b5c91b94-c567-48c2-ba7e-ef4847eab8c4 req-447ba19d-9e22-44ad-aa70-1794635ad4b7 service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Received event network-vif-deleted-2dd94f31-46c1-4662-9f19-e6f69a3decf8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 809.582128] env[62383]: INFO nova.compute.manager [req-b5c91b94-c567-48c2-ba7e-ef4847eab8c4 req-447ba19d-9e22-44ad-aa70-1794635ad4b7 service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Neutron deleted interface 2dd94f31-46c1-4662-9f19-e6f69a3decf8; detaching it from the instance and deleting it from the info cache [ 809.582128] env[62383]: DEBUG nova.network.neutron [req-b5c91b94-c567-48c2-ba7e-ef4847eab8c4 req-447ba19d-9e22-44ad-aa70-1794635ad4b7 service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.666707] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451660, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173751} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.666987] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 809.667889] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2786db8-72e4-479e-b406-24cbbba5e21d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.690929] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 2f028680-8db4-474a-8f24-880c4702877b/2f028680-8db4-474a-8f24-880c4702877b.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 809.691237] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d28b1c5d-4bd3-4304-86a2-6b18f71e4811 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.711625] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 809.711625] env[62383]: value = "task-2451661" [ 809.711625] env[62383]: _type = "Task" [ 809.711625] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.720830] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451661, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.803773] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "8a165d96-f503-4bc5-bff4-e6a85201e137-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.804020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.804204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.027149] env[62383]: DEBUG nova.network.neutron [-] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.067512] env[62383]: INFO nova.compute.resource_tracker [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating resource usage from migration 88f6ac26-e38f-4ff9-9ba8-0b8c72f3e3ab [ 810.070389] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "fad7a4a1-b9ff-4f71-b61d-a401df02fab3" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.507s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.070860] env[62383]: DEBUG nova.compute.manager [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 810.085724] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7890e351-4440-4436-8913-679a2fe6b096 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.097865] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656c097e-4a69-4ea5-99d5-c4bf28124b48 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.147576] env[62383]: DEBUG nova.compute.manager [req-b5c91b94-c567-48c2-ba7e-ef4847eab8c4 req-447ba19d-9e22-44ad-aa70-1794635ad4b7 service nova] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Detach interface failed, port_id=2dd94f31-46c1-4662-9f19-e6f69a3decf8, reason: Instance 2337e9a2-736c-4d58-ac2e-04c8ad813be4 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 810.228845] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451661, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.533579] env[62383]: INFO nova.compute.manager [-] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Took 1.35 seconds to deallocate network for instance. [ 810.579086] env[62383]: DEBUG nova.compute.utils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 810.584798] env[62383]: DEBUG nova.compute.manager [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 810.584798] env[62383]: DEBUG nova.network.neutron [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 810.669186] env[62383]: DEBUG nova.policy [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f197ead8dedf4b3885fc9b81e4ca7540', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd64b5facb31d49c8bbf750d98bafb81f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 810.708414] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1e16e8-47bd-4381-8abc-cff7b00b768e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.719933] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e24d7e2-447f-4fcb-9605-9cf352d8e73e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.726657] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451661, 'name': ReconfigVM_Task, 'duration_secs': 0.706515} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.727246] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 2f028680-8db4-474a-8f24-880c4702877b/2f028680-8db4-474a-8f24-880c4702877b.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 810.727916] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-945bc9a2-42df-4211-9ac4-581abf877851 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.759430] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0725b411-b3b8-406e-989f-392e722b9678 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.763177] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 810.763177] env[62383]: value = "task-2451662" [ 810.763177] env[62383]: _type = "Task" [ 810.763177] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.770495] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb90e493-c69f-4980-bd45-47278a8818f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.778105] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451662, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.789371] env[62383]: DEBUG nova.compute.provider_tree [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.864386] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.864517] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.864732] env[62383]: DEBUG nova.network.neutron [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.041318] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.082288] env[62383]: DEBUG nova.compute.manager [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 811.090412] env[62383]: DEBUG nova.network.neutron [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Successfully created port: 2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 811.274841] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451662, 'name': Rename_Task, 'duration_secs': 0.177111} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.275198] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 811.275424] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cab487bb-ccf9-4906-97f2-0418af985850 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.281394] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 811.281394] env[62383]: value = "task-2451663" [ 811.281394] env[62383]: _type = "Task" [ 811.281394] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.289784] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451663, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.292849] env[62383]: DEBUG nova.scheduler.client.report [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 811.793935] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451663, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.797991] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.746s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.798350] env[62383]: INFO nova.compute.manager [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Migrating [ 811.806195] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.328s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.806657] env[62383]: DEBUG nova.objects.instance [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lazy-loading 'resources' on Instance uuid 9c2c55a9-5b24-4d52-8d6b-666609349a3a {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 811.850820] env[62383]: DEBUG nova.network.neutron [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance_info_cache with network_info: [{"id": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "address": "fa:16:3e:ab:2f:e4", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925071ab-96", "ovs_interfaceid": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.092383] env[62383]: DEBUG nova.compute.manager [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 812.116026] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 812.116026] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 812.116026] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 812.116026] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 812.116505] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 812.116505] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 812.116945] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 812.117264] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 812.117565] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d 
tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 812.119898] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 812.119898] env[62383]: DEBUG nova.virt.hardware [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 812.119898] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de894c87-6879-4c45-a407-cf48466ae299 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.127775] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945d754d-3994-43ad-ac44-72511fcb2e9c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.292445] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451663, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.327049] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.327238] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.327435] env[62383]: DEBUG nova.network.neutron [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 812.353208] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.481149] env[62383]: DEBUG nova.compute.manager [req-ffcf64cf-1ad3-4c01-86b3-407e8540e732 req-f1793749-d1aa-430f-a722-2eeb34d98c36 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Received event 
network-vif-plugged-2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 812.481451] env[62383]: DEBUG oslo_concurrency.lockutils [req-ffcf64cf-1ad3-4c01-86b3-407e8540e732 req-f1793749-d1aa-430f-a722-2eeb34d98c36 service nova] Acquiring lock "12e6baef-0614-4a12-b958-30b0f56fe486-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.481750] env[62383]: DEBUG oslo_concurrency.lockutils [req-ffcf64cf-1ad3-4c01-86b3-407e8540e732 req-f1793749-d1aa-430f-a722-2eeb34d98c36 service nova] Lock "12e6baef-0614-4a12-b958-30b0f56fe486-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.481847] env[62383]: DEBUG oslo_concurrency.lockutils [req-ffcf64cf-1ad3-4c01-86b3-407e8540e732 req-f1793749-d1aa-430f-a722-2eeb34d98c36 service nova] Lock "12e6baef-0614-4a12-b958-30b0f56fe486-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.482011] env[62383]: DEBUG nova.compute.manager [req-ffcf64cf-1ad3-4c01-86b3-407e8540e732 req-f1793749-d1aa-430f-a722-2eeb34d98c36 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] No waiting events found dispatching network-vif-plugged-2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 812.482192] env[62383]: WARNING nova.compute.manager [req-ffcf64cf-1ad3-4c01-86b3-407e8540e732 req-f1793749-d1aa-430f-a722-2eeb34d98c36 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Received unexpected event network-vif-plugged-2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c for instance with vm_state building and task_state spawning. [ 812.660944] env[62383]: DEBUG nova.network.neutron [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Successfully updated port: 2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 812.795904] env[62383]: DEBUG oslo_vmware.api [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451663, 'name': PowerOnVM_Task, 'duration_secs': 1.143602} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.798744] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 812.798994] env[62383]: INFO nova.compute.manager [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Took 7.44 seconds to spawn the instance on the hypervisor. 
[ 812.799212] env[62383]: DEBUG nova.compute.manager [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 812.800240] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d0276a-a4e8-4014-bacf-04cb273e4bbe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.823955] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5d7433-388d-442a-b9a9-72523c2366db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.835069] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b476a8-f3be-4f07-8da5-dcde6f99bf22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.872530] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cf3e55-d92f-44f8-8988-dab27cdd2587 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.883234] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a3e4a8-7d85-4a4e-987d-797ce51faa42 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.889903] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe611892-3a9f-411b-b1b0-235beb736ab3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.900792] env[62383]: DEBUG nova.compute.provider_tree [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.921789] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e65f7f0-9e35-4b87-8614-8bb1f3bae181 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.930283] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance '8a165d96-f503-4bc5-bff4-e6a85201e137' progress to 83 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 813.118828] env[62383]: DEBUG nova.network.neutron [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance_info_cache with network_info: [{"id": "79458cb2-668a-4c04-882f-c00f465ccd9d", "address": "fa:16:3e:06:eb:ec", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": 
"tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79458cb2-66", "ovs_interfaceid": "79458cb2-668a-4c04-882f-c00f465ccd9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.164051] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquiring lock "refresh_cache-12e6baef-0614-4a12-b958-30b0f56fe486" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.164386] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquired lock "refresh_cache-12e6baef-0614-4a12-b958-30b0f56fe486" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.164548] env[62383]: DEBUG nova.network.neutron [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 813.319796] env[62383]: INFO nova.compute.manager [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Took 54.48 seconds to build instance. 
[ 813.404277] env[62383]: DEBUG nova.scheduler.client.report [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.435723] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2db45167-2613-4b95-97d0-bc8abea27ef4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance '8a165d96-f503-4bc5-bff4-e6a85201e137' progress to 100 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 813.621374] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.703562] env[62383]: DEBUG nova.network.neutron [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 813.821430] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e4637a-5112-421a-996e-f94784bddf9d tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.991s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.910949] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.105s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.913461] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.352s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.914968] env[62383]: INFO nova.compute.claims [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.932855] env[62383]: INFO nova.scheduler.client.report [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Deleted allocations for instance 9c2c55a9-5b24-4d52-8d6b-666609349a3a [ 813.948128] env[62383]: DEBUG nova.network.neutron [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Updating instance_info_cache with network_info: [{"id": "2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c", "address": "fa:16:3e:84:20:89", "network": {"id": "51f48e31-6fb2-4f03-bedb-8621ec504520", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1104078117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d64b5facb31d49c8bbf750d98bafb81f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2422df8e-cb", "ovs_interfaceid": "2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 814.416914] env[62383]: DEBUG oslo_vmware.rw_handles [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52106899-3b06-4d7f-6364-1ce729aaf31d/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 814.418053] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a119ec1-9838-48bd-a961-f1895b5cbf1c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.428037] env[62383]: DEBUG oslo_vmware.rw_handles [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52106899-3b06-4d7f-6364-1ce729aaf31d/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 814.428119] env[62383]: ERROR oslo_vmware.rw_handles [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52106899-3b06-4d7f-6364-1ce729aaf31d/disk-0.vmdk due to incomplete transfer. [ 814.428357] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3ada29e6-b6e5-400c-9198-098fe692d6bd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.436478] env[62383]: DEBUG oslo_vmware.rw_handles [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52106899-3b06-4d7f-6364-1ce729aaf31d/disk-0.vmdk. 
{{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 814.436777] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Uploaded image ee3729ee-5379-458f-8542-4fd2440f6cf5 to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 814.439091] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 814.441955] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-57ef99ba-f591-4281-b4de-c20021bb1f1b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.443861] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f17df58a-7837-4b51-9d8b-9fd7ca276887 tempest-ListImageFiltersTestJSON-643241444 tempest-ListImageFiltersTestJSON-643241444-project-member] Lock "9c2c55a9-5b24-4d52-8d6b-666609349a3a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.134s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.451479] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 814.451479] env[62383]: value = "task-2451664" [ 814.451479] env[62383]: _type = "Task" [ 814.451479] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.455036] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Releasing lock "refresh_cache-12e6baef-0614-4a12-b958-30b0f56fe486" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.456056] env[62383]: DEBUG nova.compute.manager [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Instance network_info: |[{"id": "2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c", "address": "fa:16:3e:84:20:89", "network": {"id": "51f48e31-6fb2-4f03-bedb-8621ec504520", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1104078117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d64b5facb31d49c8bbf750d98bafb81f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2422df8e-cb", "ovs_interfaceid": "2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 814.456192] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:20:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 814.463146] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Creating folder: Project (d64b5facb31d49c8bbf750d98bafb81f). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 814.464445] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df2e94cf-f42e-44ea-bdf4-7a0e31394ce0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.469794] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451664, 'name': Destroy_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.478543] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Created folder: Project (d64b5facb31d49c8bbf750d98bafb81f) in parent group-v496304. [ 814.478659] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Creating folder: Instances. Parent ref: group-v496482. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 814.478937] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eec4b1b9-028b-4a1e-b253-c1ee1698f9c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.491025] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Created folder: Instances in parent group-v496482. [ 814.491025] env[62383]: DEBUG oslo.service.loopingcall [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 814.491025] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 814.491025] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a57d996-15c8-4f08-84c7-8868e507bf12 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.511044] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 814.511044] env[62383]: value = "task-2451667" [ 814.511044] env[62383]: _type = "Task" [ 814.511044] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.515940] env[62383]: DEBUG nova.compute.manager [req-f2c6812d-b2e3-4e39-bcce-2b0fbb7523dc req-729ca2a3-1dea-412c-87f3-6770c5422889 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Received event network-changed-2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 814.517795] env[62383]: DEBUG nova.compute.manager [req-f2c6812d-b2e3-4e39-bcce-2b0fbb7523dc req-729ca2a3-1dea-412c-87f3-6770c5422889 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Refreshing instance network info cache due to event network-changed-2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 814.517795] env[62383]: DEBUG oslo_concurrency.lockutils [req-f2c6812d-b2e3-4e39-bcce-2b0fbb7523dc req-729ca2a3-1dea-412c-87f3-6770c5422889 service nova] Acquiring lock "refresh_cache-12e6baef-0614-4a12-b958-30b0f56fe486" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.517795] env[62383]: DEBUG oslo_concurrency.lockutils [req-f2c6812d-b2e3-4e39-bcce-2b0fbb7523dc req-729ca2a3-1dea-412c-87f3-6770c5422889 service nova] Acquired lock "refresh_cache-12e6baef-0614-4a12-b958-30b0f56fe486" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.517795] env[62383]: DEBUG nova.network.neutron [req-f2c6812d-b2e3-4e39-bcce-2b0fbb7523dc req-729ca2a3-1dea-412c-87f3-6770c5422889 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Refreshing network info cache for port 2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 814.523983] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451667, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.710292] env[62383]: DEBUG nova.compute.manager [req-34810de2-d70f-493d-a196-75bf2f17fa63 req-a884ff83-9b43-4470-866c-b16fe5f49f1d service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Received event network-changed-5ba29557-a079-4404-9449-eeff24a0a3e4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 814.710292] env[62383]: DEBUG nova.compute.manager [req-34810de2-d70f-493d-a196-75bf2f17fa63 req-a884ff83-9b43-4470-866c-b16fe5f49f1d service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Refreshing instance network info cache due to event network-changed-5ba29557-a079-4404-9449-eeff24a0a3e4. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 814.710456] env[62383]: DEBUG oslo_concurrency.lockutils [req-34810de2-d70f-493d-a196-75bf2f17fa63 req-a884ff83-9b43-4470-866c-b16fe5f49f1d service nova] Acquiring lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.710526] env[62383]: DEBUG oslo_concurrency.lockutils [req-34810de2-d70f-493d-a196-75bf2f17fa63 req-a884ff83-9b43-4470-866c-b16fe5f49f1d service nova] Acquired lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.710694] env[62383]: DEBUG nova.network.neutron [req-34810de2-d70f-493d-a196-75bf2f17fa63 req-a884ff83-9b43-4470-866c-b16fe5f49f1d service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Refreshing network info cache for port 5ba29557-a079-4404-9449-eeff24a0a3e4 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 814.969232] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451664, 'name': Destroy_Task, 'duration_secs': 0.465717} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.969644] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Destroyed the VM [ 814.969736] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 814.969973] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-008fb62f-d73f-4eec-9ff3-f0ea7b38cbed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.977139] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 814.977139] env[62383]: value = "task-2451668" [ 814.977139] env[62383]: _type = "Task" [ 814.977139] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.990211] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451668, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.027850] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451667, 'name': CreateVM_Task, 'duration_secs': 0.401142} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.029172] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 815.029172] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.029327] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.032871] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 815.032871] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06c83079-edd2-4bc2-94e9-c4db0faa6bd5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.034593] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for the task: (returnval){ [ 815.034593] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e4b37f-768b-2ce7-7368-2a9eb59428a3" [ 815.034593] env[62383]: _type = "Task" [ 815.034593] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.045538] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e4b37f-768b-2ce7-7368-2a9eb59428a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.137486] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445c6f97-8d45-47ae-9c17-08c358c043c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.159456] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance '93234e99-268f-491e-96bd-a77f4c9f164b' progress to 0 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 815.434971] env[62383]: DEBUG nova.network.neutron [req-f2c6812d-b2e3-4e39-bcce-2b0fbb7523dc req-729ca2a3-1dea-412c-87f3-6770c5422889 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Updated VIF entry in instance network info cache for port 2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 815.435604] env[62383]: DEBUG nova.network.neutron [req-f2c6812d-b2e3-4e39-bcce-2b0fbb7523dc req-729ca2a3-1dea-412c-87f3-6770c5422889 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Updating instance_info_cache with network_info: [{"id": "2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c", "address": "fa:16:3e:84:20:89", "network": {"id": "51f48e31-6fb2-4f03-bedb-8621ec504520", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1104078117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d64b5facb31d49c8bbf750d98bafb81f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2422df8e-cb", "ovs_interfaceid": "2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.491885] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451668, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.509655] env[62383]: DEBUG nova.network.neutron [req-34810de2-d70f-493d-a196-75bf2f17fa63 req-a884ff83-9b43-4470-866c-b16fe5f49f1d service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updated VIF entry in instance network info cache for port 5ba29557-a079-4404-9449-eeff24a0a3e4. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 815.510043] env[62383]: DEBUG nova.network.neutron [req-34810de2-d70f-493d-a196-75bf2f17fa63 req-a884ff83-9b43-4470-866c-b16fe5f49f1d service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [{"id": "5ba29557-a079-4404-9449-eeff24a0a3e4", "address": "fa:16:3e:fd:2c:1c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ba29557-a0", "ovs_interfaceid": "5ba29557-a079-4404-9449-eeff24a0a3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.533404] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d54b9e-5b94-430d-a7b0-0b7594aabb4b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.549286] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c839c6-61c9-4ee2-a300-e6081f858f99 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.553280] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e4b37f-768b-2ce7-7368-2a9eb59428a3, 'name': SearchDatastore_Task, 'duration_secs': 0.02448} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.553630] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.553904] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 815.554184] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.554348] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.554533] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 815.555206] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd214846-333e-41c3-a095-9349b0f315ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.585525] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46cfbe08-ab62-4195-9a4a-9afb3634770c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.591781] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 815.591984] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 815.594592] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27201562-6206-4907-967e-39faf7725fe8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.597964] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3595f6-d31d-468d-8289-3eea0ffa2e80 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.605653] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for the task: (returnval){ [ 815.605653] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a639b2-0765-b756-0aa5-3d984f3a55e2" [ 815.605653] env[62383]: _type = "Task" [ 815.605653] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.613602] env[62383]: DEBUG nova.compute.provider_tree [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.623327] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a639b2-0765-b756-0aa5-3d984f3a55e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.670180] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 815.670483] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c50436d-427d-46bb-93c8-fc34861ae47d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.677655] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 815.677655] env[62383]: value = "task-2451669" [ 815.677655] env[62383]: _type = "Task" [ 815.677655] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.681065] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "8a165d96-f503-4bc5-bff4-e6a85201e137" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 815.681272] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.681457] env[62383]: DEBUG nova.compute.manager [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Going to confirm migration 3 {{(pid=62383) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 815.687806] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451669, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.938523] env[62383]: DEBUG oslo_concurrency.lockutils [req-f2c6812d-b2e3-4e39-bcce-2b0fbb7523dc req-729ca2a3-1dea-412c-87f3-6770c5422889 service nova] Releasing lock "refresh_cache-12e6baef-0614-4a12-b958-30b0f56fe486" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.989968] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451668, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.012814] env[62383]: DEBUG oslo_concurrency.lockutils [req-34810de2-d70f-493d-a196-75bf2f17fa63 req-a884ff83-9b43-4470-866c-b16fe5f49f1d service nova] Releasing lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.119188] env[62383]: DEBUG nova.scheduler.client.report [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 816.126610] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a639b2-0765-b756-0aa5-3d984f3a55e2, 'name': SearchDatastore_Task, 'duration_secs': 0.033958} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.127667] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa48c162-5f38-425b-930f-63254455beb4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.133391] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for the task: (returnval){ [ 816.133391] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522b9361-0e1e-db38-e40e-801910de5681" [ 816.133391] env[62383]: _type = "Task" [ 816.133391] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.141564] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522b9361-0e1e-db38-e40e-801910de5681, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.189559] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451669, 'name': PowerOffVM_Task, 'duration_secs': 0.211884} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.189970] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 816.190367] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance '93234e99-268f-491e-96bd-a77f4c9f164b' progress to 17 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 816.263703] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.263892] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.264115] env[62383]: DEBUG nova.network.neutron [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 816.264290] env[62383]: DEBUG nova.objects.instance [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'info_cache' on Instance uuid 8a165d96-f503-4bc5-bff4-e6a85201e137 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 816.278396] env[62383]: DEBUG oslo_concurrency.lockutils [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.278662] env[62383]: DEBUG oslo_concurrency.lockutils [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.278949] env[62383]: DEBUG oslo_concurrency.lockutils [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.279163] env[62383]: DEBUG oslo_concurrency.lockutils [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.279716] env[62383]: DEBUG oslo_concurrency.lockutils [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.281506] env[62383]: INFO nova.compute.manager [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Terminating instance [ 816.492196] env[62383]: DEBUG oslo_vmware.api [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451668, 'name': RemoveSnapshot_Task, 'duration_secs': 1.035774} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.492196] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 816.492615] env[62383]: INFO nova.compute.manager [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Took 16.12 seconds to snapshot the instance on the hypervisor. [ 816.628276] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.628853] env[62383]: DEBUG nova.compute.manager [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 816.631799] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.991s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.633348] env[62383]: INFO nova.compute.claims [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.645473] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522b9361-0e1e-db38-e40e-801910de5681, 'name': SearchDatastore_Task, 'duration_secs': 0.043223} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.645800] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.646158] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 12e6baef-0614-4a12-b958-30b0f56fe486/12e6baef-0614-4a12-b958-30b0f56fe486.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 816.646462] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb4bc50f-6238-45db-92c1-c3e81610ff2d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.653817] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for the task: (returnval){ [ 816.653817] env[62383]: value = "task-2451670" [ 816.653817] env[62383]: _type = "Task" [ 816.653817] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.663208] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451670, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.698828] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 816.699261] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.699261] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 816.699479] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.699643] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 816.699880] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 816.700179] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 816.700365] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 816.700564] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 
tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 816.700736] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 816.700905] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 816.709147] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cb587fa-4b65-4607-8ad9-b436627578da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.730414] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 816.730414] env[62383]: value = "task-2451671" [ 816.730414] env[62383]: _type = "Task" [ 816.730414] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.738267] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451671, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.756355] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.756618] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.787995] env[62383]: DEBUG nova.compute.manager [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 816.787995] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 816.789016] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a23179-a87c-40ca-b8db-c946d6f422c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.799782] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 816.800149] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aad807b5-6f6d-4670-a57a-77e07d1f4e5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.862881] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 816.862881] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 816.862881] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleting the datastore file [datastore2] a68610a6-f684-4cc9-8dd4-8b90d2d379da {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 816.863185] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-191990a4-14dd-4a3e-a212-9c8cbc6fc818 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.870161] env[62383]: DEBUG oslo_vmware.api [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 816.870161] env[62383]: value = "task-2451673" [ 816.870161] env[62383]: _type = "Task" [ 816.870161] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.877951] env[62383]: DEBUG oslo_vmware.api [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451673, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.997035] env[62383]: DEBUG nova.compute.manager [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Instance disappeared during snapshot {{(pid=62383) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 817.011918] env[62383]: DEBUG nova.compute.manager [None req-cf05900f-c6fd-4095-8b57-b82bceaeac5c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image not found during clean up ee3729ee-5379-458f-8542-4fd2440f6cf5 {{(pid=62383) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 817.138026] env[62383]: DEBUG nova.compute.utils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 817.140184] env[62383]: DEBUG nova.compute.manager [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 817.140421] env[62383]: DEBUG nova.network.neutron [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 817.169327] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451670, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.191390] env[62383]: DEBUG nova.policy [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7111f98afebe43d48615445b7fd4596d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28e37dc42ac74824b43bd4b120a52674', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 817.242128] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451671, 'name': ReconfigVM_Task, 'duration_secs': 0.253745} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.242400] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance '93234e99-268f-491e-96bd-a77f4c9f164b' progress to 33 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 817.259482] env[62383]: DEBUG nova.compute.manager [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 817.383513] env[62383]: DEBUG oslo_vmware.api [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.444709} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.383775] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 817.384104] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 817.384414] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 817.384726] env[62383]: INFO nova.compute.manager [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Took 0.60 seconds to destroy the instance on the hypervisor. [ 817.385135] env[62383]: DEBUG oslo.service.loopingcall [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 817.385433] env[62383]: DEBUG nova.compute.manager [-] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 817.385658] env[62383]: DEBUG nova.network.neutron [-] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 817.590096] env[62383]: DEBUG nova.network.neutron [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Successfully created port: df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.592779] env[62383]: DEBUG nova.network.neutron [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance_info_cache with network_info: [{"id": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "address": "fa:16:3e:ab:2f:e4", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925071ab-96", "ovs_interfaceid": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.647290] env[62383]: DEBUG nova.compute.manager [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 817.667062] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451670, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649122} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.667364] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 12e6baef-0614-4a12-b958-30b0f56fe486/12e6baef-0614-4a12-b958-30b0f56fe486.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 817.667626] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 817.667913] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d7b879c-f082-498c-ae45-7e788c3ed6f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.678105] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for the task: (returnval){ [ 817.678105] env[62383]: value = "task-2451674" [ 817.678105] env[62383]: _type = "Task" [ 817.678105] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.686578] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451674, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.761854] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 817.763110] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.763110] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 817.763110] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.763110] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 817.763110] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 817.764047] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 817.764162] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 817.764428] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 
tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 817.764668] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 817.765255] env[62383]: DEBUG nova.virt.hardware [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 817.770604] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Reconfiguring VM instance instance-00000037 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 817.777546] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57e510b2-79bb-4f2e-b499-0dabdd19043a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.800914] env[62383]: DEBUG nova.compute.manager [req-dddda039-49b5-40f2-8465-a4fcc8a2ec3c req-ea7cd792-cb14-41ec-8a7e-213238533a96 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Received event network-vif-deleted-9a3f1093-bdac-4a2c-8938-e0953e605535 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 817.801152] env[62383]: INFO nova.compute.manager [req-dddda039-49b5-40f2-8465-a4fcc8a2ec3c req-ea7cd792-cb14-41ec-8a7e-213238533a96 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Neutron deleted interface 9a3f1093-bdac-4a2c-8938-e0953e605535; detaching it from the instance and deleting it from the info cache [ 817.801619] env[62383]: DEBUG nova.network.neutron [req-dddda039-49b5-40f2-8465-a4fcc8a2ec3c req-ea7cd792-cb14-41ec-8a7e-213238533a96 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.808667] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 817.808667] env[62383]: value = "task-2451675" [ 817.808667] env[62383]: _type = "Task" [ 817.808667] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.815954] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.822038] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451675, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.095590] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.096020] env[62383]: DEBUG nova.objects.instance [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'migration_context' on Instance uuid 8a165d96-f503-4bc5-bff4-e6a85201e137 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 818.149543] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5e6402-277b-4686-a684-a7a849348116 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.159958] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f749a0-1f03-4ceb-b309-d794d329d500 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.192644] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75cdbf9-66cd-4e24-89c9-9952c24a69ff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.202625] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160f8864-f1a2-42bf-a1dd-8b00c50c6188 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.206119] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451674, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124238} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.206603] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 818.207600] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d199d4e3-b032-465b-b2a2-bc8a7ee4d1f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.218993] env[62383]: DEBUG nova.compute.provider_tree [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.239296] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 12e6baef-0614-4a12-b958-30b0f56fe486/12e6baef-0614-4a12-b958-30b0f56fe486.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 818.240209] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d147fe26-7b5a-4db7-a367-48884c99022d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.258672] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for the task: (returnval){ [ 818.258672] env[62383]: value = "task-2451676" [ 818.258672] env[62383]: _type = "Task" [ 818.258672] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.267311] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451676, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.300945] env[62383]: DEBUG nova.network.neutron [-] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.305876] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87d52bc0-a30b-4d56-97b1-dc82608c2f67 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.316598] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35600fbd-f524-457f-8003-c2462a3c82b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.330723] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451675, 'name': ReconfigVM_Task, 'duration_secs': 0.213369} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.331417] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Reconfigured VM instance instance-00000037 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 818.332207] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fb6e2f-a5ed-4148-bf30-bd916cd123f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.355093] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 93234e99-268f-491e-96bd-a77f4c9f164b/93234e99-268f-491e-96bd-a77f4c9f164b.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 818.373039] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c6f0d54-458f-4a8b-95a5-281af2bd6e3c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.386390] env[62383]: DEBUG nova.compute.manager [req-dddda039-49b5-40f2-8465-a4fcc8a2ec3c req-ea7cd792-cb14-41ec-8a7e-213238533a96 service nova] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Detach interface failed, port_id=9a3f1093-bdac-4a2c-8938-e0953e605535, reason: Instance a68610a6-f684-4cc9-8dd4-8b90d2d379da could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 818.391817] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 818.391817] env[62383]: value = "task-2451677" [ 818.391817] env[62383]: _type = "Task" [ 818.391817] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.399374] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451677, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.598795] env[62383]: DEBUG nova.objects.base [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Object Instance<8a165d96-f503-4bc5-bff4-e6a85201e137> lazy-loaded attributes: info_cache,migration_context {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 818.599759] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b96c23-36ce-4a27-aa97-b9e33c3aa694 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.620027] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-132dbf03-ab08-43f1-b1a6-6ff1d7972105 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.625872] env[62383]: DEBUG oslo_vmware.api [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 818.625872] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5289daf5-d339-f6b0-5328-b4d70878b568" [ 818.625872] env[62383]: _type = "Task" [ 818.625872] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.633922] env[62383]: DEBUG oslo_vmware.api [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5289daf5-d339-f6b0-5328-b4d70878b568, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.660390] env[62383]: DEBUG nova.compute.manager [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 818.687462] env[62383]: DEBUG nova.virt.hardware [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 818.687704] env[62383]: DEBUG nova.virt.hardware [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.687866] env[62383]: DEBUG nova.virt.hardware [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 818.688060] env[62383]: DEBUG nova.virt.hardware [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.688212] env[62383]: DEBUG nova.virt.hardware [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 818.688362] env[62383]: DEBUG nova.virt.hardware [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 818.688569] env[62383]: DEBUG nova.virt.hardware [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 818.688760] env[62383]: DEBUG nova.virt.hardware [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 818.688935] env[62383]: DEBUG nova.virt.hardware [None 
req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 818.689118] env[62383]: DEBUG nova.virt.hardware [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 818.689298] env[62383]: DEBUG nova.virt.hardware [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 818.690210] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96977455-7c3d-4724-91c0-098f61ecc963 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.697675] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263b4de8-90c0-4aa9-b05a-c6d8f003245e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.722079] env[62383]: DEBUG nova.scheduler.client.report [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.768644] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451676, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.803626] env[62383]: INFO nova.compute.manager [-] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Took 1.42 seconds to deallocate network for instance. [ 818.901985] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451677, 'name': ReconfigVM_Task, 'duration_secs': 0.299657} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.902294] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 93234e99-268f-491e-96bd-a77f4c9f164b/93234e99-268f-491e-96bd-a77f4c9f164b.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 818.902550] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance '93234e99-268f-491e-96bd-a77f4c9f164b' progress to 50 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 819.117052] env[62383]: DEBUG nova.network.neutron [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Successfully updated port: df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.136879] env[62383]: DEBUG oslo_vmware.api [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5289daf5-d339-f6b0-5328-b4d70878b568, 'name': SearchDatastore_Task, 'duration_secs': 0.008413} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.137196] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.226905] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.227480] env[62383]: DEBUG nova.compute.manager [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 819.230257] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.642s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.230482] env[62383]: DEBUG nova.objects.instance [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lazy-loading 'resources' on Instance uuid bc1e1f0c-a86d-4d31-a8c4-45d362e9b807 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.270421] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451676, 'name': ReconfigVM_Task, 'duration_secs': 0.841044} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.270702] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 12e6baef-0614-4a12-b958-30b0f56fe486/12e6baef-0614-4a12-b958-30b0f56fe486.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.271338] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2851815-6d14-4b1a-80df-854acbe5bf7f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.280995] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for the task: (returnval){ [ 819.280995] env[62383]: value = "task-2451678" [ 819.280995] env[62383]: _type = "Task" [ 819.280995] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.290688] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451678, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.310258] env[62383]: DEBUG oslo_concurrency.lockutils [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.409386] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75134660-b945-49ae-b936-034c47459181 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.428811] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b8dbd7-0a3a-4a5a-8531-3c4d13fcf582 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.448058] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance '93234e99-268f-491e-96bd-a77f4c9f164b' progress to 67 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 819.619930] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.620191] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquired lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.620272] env[62383]: DEBUG nova.network.neutron [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.734307] env[62383]: DEBUG nova.compute.utils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 819.738927] env[62383]: DEBUG nova.compute.manager [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.739114] env[62383]: DEBUG nova.network.neutron [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.783696] env[62383]: DEBUG nova.policy [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4de9dec9c1d2474eb611f4a2623d602d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aead8ea1d1de4d0d8d8c07dec519d8b4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 819.793755] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451678, 'name': Rename_Task, 'duration_secs': 0.20672} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.796161] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 819.796482] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b429179-378f-4b78-a218-ff4330132763 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.803867] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for the task: (returnval){ [ 819.803867] env[62383]: value = "task-2451679" [ 819.803867] env[62383]: _type = "Task" [ 819.803867] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.813388] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451679, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.824847] env[62383]: DEBUG nova.compute.manager [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Received event network-vif-plugged-df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 819.825077] env[62383]: DEBUG oslo_concurrency.lockutils [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] Acquiring lock "6fda89ec-aee1-4c1e-b005-51a9742abb19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.825284] env[62383]: DEBUG oslo_concurrency.lockutils [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] Lock "6fda89ec-aee1-4c1e-b005-51a9742abb19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.825449] env[62383]: DEBUG oslo_concurrency.lockutils [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] Lock "6fda89ec-aee1-4c1e-b005-51a9742abb19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.825613] env[62383]: DEBUG nova.compute.manager [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] No waiting events found dispatching network-vif-plugged-df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 819.825804] env[62383]: WARNING nova.compute.manager [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Received unexpected event network-vif-plugged-df613873-27c1-4191-ad83-7321eb499e0b for instance with vm_state building and task_state spawning. [ 819.825907] env[62383]: DEBUG nova.compute.manager [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Received event network-changed-df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 819.826448] env[62383]: DEBUG nova.compute.manager [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Refreshing instance network info cache due to event network-changed-df613873-27c1-4191-ad83-7321eb499e0b. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 819.826627] env[62383]: DEBUG oslo_concurrency.lockutils [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] Acquiring lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.005100] env[62383]: DEBUG nova.network.neutron [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Port 79458cb2-668a-4c04-882f-c00f465ccd9d binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 820.103472] env[62383]: DEBUG nova.network.neutron [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Successfully created port: 845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 820.167498] env[62383]: DEBUG nova.network.neutron [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.185246] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb9487b-483b-4e39-b445-19af447a4d22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.195185] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bad3e43-86a4-45d7-ba06-a2cff5aa0c87 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.226260] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec04735-2546-4129-ad91-d11f3e01f808 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.235007] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada31dd2-741c-4594-a65d-bdf27cf3a952 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.239176] env[62383]: DEBUG nova.compute.manager [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 820.251314] env[62383]: DEBUG nova.compute.provider_tree [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.314744] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451679, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.375902] env[62383]: DEBUG nova.network.neutron [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Updating instance_info_cache with network_info: [{"id": "df613873-27c1-4191-ad83-7321eb499e0b", "address": "fa:16:3e:8b:10:34", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf613873-27", "ovs_interfaceid": "df613873-27c1-4191-ad83-7321eb499e0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.753699] env[62383]: DEBUG nova.scheduler.client.report [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 820.817122] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451679, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.880991] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Releasing lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.881397] env[62383]: DEBUG nova.compute.manager [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Instance network_info: |[{"id": "df613873-27c1-4191-ad83-7321eb499e0b", "address": "fa:16:3e:8b:10:34", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf613873-27", "ovs_interfaceid": "df613873-27c1-4191-ad83-7321eb499e0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 820.881720] env[62383]: DEBUG oslo_concurrency.lockutils [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] Acquired lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.881900] env[62383]: DEBUG nova.network.neutron [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Refreshing network info cache for port df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.883152] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:10:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec46b14d-3310-4f2b-96c1-f53ee47d3759', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df613873-27c1-4191-ad83-7321eb499e0b', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.891449] env[62383]: DEBUG oslo.service.loopingcall [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 
tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 820.893449] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.893689] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ea2e6f6-b67e-470d-9545-3b55b6da7b3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.914514] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 820.914514] env[62383]: value = "task-2451680" [ 820.914514] env[62383]: _type = "Task" [ 820.914514] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.922624] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451680, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.030815] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "93234e99-268f-491e-96bd-a77f4c9f164b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.030815] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "93234e99-268f-491e-96bd-a77f4c9f164b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.030815] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "93234e99-268f-491e-96bd-a77f4c9f164b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.248899] env[62383]: DEBUG nova.compute.manager [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 821.258483] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.028s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.260839] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.497s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.262347] env[62383]: INFO nova.compute.claims [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.276271] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 821.276271] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.276271] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 821.276506] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.276506] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} 
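The repeated "Getting desirable topologies ... Build topologies for 1 vcpu(s) 1:1:1 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" sequences above (and the one continuing below for req-09262cc3) come from Nova's CPU-topology selection for the 1-vCPU m1.nano flavor. A minimal sketch of that enumeration, assuming only the flavor's vCPU count and the 65536 per-dimension limits printed in the log; this is an illustration of the idea, not the code in nova/virt/hardware.py:

    # Illustrative sketch only: enumerate (sockets, cores, threads) triples whose
    # product equals the requested vCPU count, subject to per-dimension limits.
    from collections import namedtuple

    Topology = namedtuple("Topology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append(Topology(sockets, cores, threads))
        return found

    if __name__ == "__main__":
        print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]
        print(possible_topologies(4))  # six factorisations, e.g. 2:2:1 and 4:1:1

With vcpus=1 the only factorisation is sockets=1, cores=1, threads=1, which is why every build in this log reports exactly one possible topology and the same sorted result.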
[ 821.276599] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 821.277228] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 821.277228] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 821.277228] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 821.277355] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 821.277506] env[62383]: DEBUG nova.virt.hardware [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 821.278436] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87eb92e8-10a2-47d9-a83d-26fa3987a552 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.281882] env[62383]: INFO nova.scheduler.client.report [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Deleted allocations for instance bc1e1f0c-a86d-4d31-a8c4-45d362e9b807 [ 821.289401] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fb2f67-7169-47b7-9843-272e641bd245 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.314678] env[62383]: DEBUG oslo_vmware.api [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451679, 'name': PowerOnVM_Task, 'duration_secs': 1.147458} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.314971] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 821.315214] env[62383]: INFO nova.compute.manager [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Took 9.22 seconds to spawn the instance on the hypervisor. [ 821.315412] env[62383]: DEBUG nova.compute.manager [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 821.316311] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f90e81c-c577-44a1-958a-3c32c802f7f5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.424656] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451680, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.679839] env[62383]: DEBUG nova.network.neutron [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Updated VIF entry in instance network info cache for port df613873-27c1-4191-ad83-7321eb499e0b. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.680423] env[62383]: DEBUG nova.network.neutron [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Updating instance_info_cache with network_info: [{"id": "df613873-27c1-4191-ad83-7321eb499e0b", "address": "fa:16:3e:8b:10:34", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf613873-27", "ovs_interfaceid": "df613873-27c1-4191-ad83-7321eb499e0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.791050] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8d5bbfd-18e6-4fa5-a95c-98db1e8a82bd tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "bc1e1f0c-a86d-4d31-a8c4-45d362e9b807" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.883s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.810958] env[62383]: DEBUG nova.network.neutron [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Successfully updated port: 845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.833709] env[62383]: INFO nova.compute.manager [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Took 55.91 seconds to build instance. 
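Every vCenter operation above follows the same shape: a *_Task is submitted (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task) and then polled via wait_for_task/_poll_task until the periodic "progress is N%" entries end in "completed successfully", as just happened for task-2451679. A generic sketch of that polling loop, assuming a hypothetical fetch_task_info(task_id) callable returning a (state, progress) pair; this is an analogue for illustration, not the oslo.vmware implementation:

    import time

    def wait_for_task(task_id, fetch_task_info, interval=0.5, timeout=300):
        """Poll a task until it succeeds, fails, or the timeout expires."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = fetch_task_info(task_id)  # hypothetical callable
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task %s failed" % task_id)
            # Corresponds to the periodic "progress is N%" entries in the log.
            print("Task %s progress is %d%%" % (task_id, progress))
            time.sleep(interval)
        raise TimeoutError("task %s did not complete within %ss" % (task_id, timeout))

    if __name__ == "__main__":
        # Fake task states so the sketch runs standalone.
        states = iter([("running", 0), ("running", 66), ("success", 100)])
        wait_for_task("task-example", lambda _tid: next(states), interval=0.01)

The fixed polling interval and the progress printout are assumptions for the sketch; the log only shows that polling repeats until the task reports completion or failure.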
[ 821.852566] env[62383]: DEBUG nova.compute.manager [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Received event network-vif-plugged-845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 821.852786] env[62383]: DEBUG oslo_concurrency.lockutils [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] Acquiring lock "1b025655-acad-4b70-9e1a-489683cafb7e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.853352] env[62383]: DEBUG oslo_concurrency.lockutils [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] Lock "1b025655-acad-4b70-9e1a-489683cafb7e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.853352] env[62383]: DEBUG oslo_concurrency.lockutils [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] Lock "1b025655-acad-4b70-9e1a-489683cafb7e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.853352] env[62383]: DEBUG nova.compute.manager [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] No waiting events found dispatching network-vif-plugged-845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 821.853691] env[62383]: WARNING nova.compute.manager [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Received unexpected event network-vif-plugged-845110d3-620c-4852-8aab-e6907d5b3af2 for instance with vm_state building and task_state spawning. [ 821.853691] env[62383]: DEBUG nova.compute.manager [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Received event network-changed-845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 821.853775] env[62383]: DEBUG nova.compute.manager [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Refreshing instance network info cache due to event network-changed-845110d3-620c-4852-8aab-e6907d5b3af2. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 821.854040] env[62383]: DEBUG oslo_concurrency.lockutils [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] Acquiring lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.854205] env[62383]: DEBUG oslo_concurrency.lockutils [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] Acquired lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.854369] env[62383]: DEBUG nova.network.neutron [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Refreshing network info cache for port 845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.925401] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451680, 'name': CreateVM_Task, 'duration_secs': 0.653179} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.925577] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.926256] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.926394] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.926722] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 821.926962] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-146614c8-3ad3-40f7-9006-13c8480e0825 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.931395] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 821.931395] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52847663-a41c-4a29-a2bc-31e13eb95fea" [ 821.931395] env[62383]: _type = "Task" [ 821.931395] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.938943] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52847663-a41c-4a29-a2bc-31e13eb95fea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.072020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.072755] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.072755] env[62383]: DEBUG nova.network.neutron [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 822.185583] env[62383]: DEBUG oslo_concurrency.lockutils [req-44d3500a-06a9-48fd-892c-912a8b3c3911 req-e5b9c85e-9a12-4ec2-9b67-c03c4b4c1b2a service nova] Releasing lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.313150] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.335752] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2dc2132-9718-439d-8bbc-64b7a6b07b6d tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "12e6baef-0614-4a12-b958-30b0f56fe486" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.423s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.405497] env[62383]: DEBUG nova.network.neutron [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.441594] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52847663-a41c-4a29-a2bc-31e13eb95fea, 'name': SearchDatastore_Task, 'duration_secs': 0.011773} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.442093] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.442216] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.443028] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.443028] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.443028] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.443028] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3cde41d-dd94-462b-92e3-d492800b859d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.451390] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.451615] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.452539] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b585cf2-f4a2-44f5-9697-8c0885223022 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.457604] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 822.457604] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526dba72-2f13-ef0c-891c-2b892c9c1824" [ 822.457604] env[62383]: _type = "Task" [ 822.457604] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.471468] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]526dba72-2f13-ef0c-891c-2b892c9c1824, 'name': SearchDatastore_Task, 'duration_secs': 0.008852} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.474471] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e1e1cc2-cc3a-476a-9c0a-ac05c174267d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.480460] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 822.480460] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52730817-7dd1-8156-911a-17782d9716ed" [ 822.480460] env[62383]: _type = "Task" [ 822.480460] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.491420] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52730817-7dd1-8156-911a-17782d9716ed, 'name': SearchDatastore_Task, 'duration_secs': 0.009077} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.491670] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.491926] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 6fda89ec-aee1-4c1e-b005-51a9742abb19/6fda89ec-aee1-4c1e-b005-51a9742abb19.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.492195] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a686bc9e-e118-4257-ab68-d34d79efa844 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.498506] env[62383]: DEBUG nova.network.neutron [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.500630] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 822.500630] env[62383]: value = "task-2451681" [ 822.500630] env[62383]: _type = "Task" [ 822.500630] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.509564] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451681, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.625375] env[62383]: DEBUG oslo_concurrency.lockutils [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquiring lock "12e6baef-0614-4a12-b958-30b0f56fe486" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.625630] env[62383]: DEBUG oslo_concurrency.lockutils [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "12e6baef-0614-4a12-b958-30b0f56fe486" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.625830] env[62383]: DEBUG oslo_concurrency.lockutils [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquiring lock "12e6baef-0614-4a12-b958-30b0f56fe486-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.626436] env[62383]: DEBUG oslo_concurrency.lockutils [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "12e6baef-0614-4a12-b958-30b0f56fe486-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.626686] env[62383]: DEBUG oslo_concurrency.lockutils [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "12e6baef-0614-4a12-b958-30b0f56fe486-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.630959] env[62383]: INFO nova.compute.manager [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Terminating instance [ 822.780436] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60164121-828c-4878-a7e1-dae33700e263 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.791158] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81435ec9-e437-4e86-b9f8-fdb283a5ae45 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.826279] env[62383]: DEBUG nova.network.neutron [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance_info_cache with network_info: [{"id": "79458cb2-668a-4c04-882f-c00f465ccd9d", "address": "fa:16:3e:06:eb:ec", 
"network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79458cb2-66", "ovs_interfaceid": "79458cb2-668a-4c04-882f-c00f465ccd9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.828120] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4fb319-07bb-4717-ba62-e6863db2c8c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.839052] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa0d9b0-1c06-4f1c-9142-51d0175ace12 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.856252] env[62383]: DEBUG nova.compute.provider_tree [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.005087] env[62383]: DEBUG oslo_concurrency.lockutils [req-ed4ea8bf-804c-427f-88e9-7efaeba699aa req-08fec8fb-d0cb-485c-bccd-198185b0fbe1 service nova] Releasing lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.005477] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.005636] env[62383]: DEBUG nova.network.neutron [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.012536] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451681, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459605} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.012774] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 6fda89ec-aee1-4c1e-b005-51a9742abb19/6fda89ec-aee1-4c1e-b005-51a9742abb19.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.013075] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.013839] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42814232-9816-4d20-8432-21bb905c2def {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.021083] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 823.021083] env[62383]: value = "task-2451682" [ 823.021083] env[62383]: _type = "Task" [ 823.021083] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.029587] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451682, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.136864] env[62383]: DEBUG nova.compute.manager [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 823.137140] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.138046] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2267ff99-ad86-405f-90c8-af4eb6364971 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.145960] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.145960] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76dbb678-a766-45ec-839e-24a1a02d6949 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.152303] env[62383]: DEBUG oslo_vmware.api [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for the task: (returnval){ [ 823.152303] env[62383]: value = "task-2451683" [ 823.152303] env[62383]: _type = "Task" [ 823.152303] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.161424] env[62383]: DEBUG oslo_vmware.api [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451683, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.332346] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.359529] env[62383]: DEBUG nova.scheduler.client.report [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.391265] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.391540] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.391750] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.391934] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.392124] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.396515] env[62383]: INFO nova.compute.manager [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Terminating instance [ 823.530603] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451682, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.217374} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.530880] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 823.531723] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b47079e-00b1-4297-a7e0-9b74ca019233 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.553704] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 6fda89ec-aee1-4c1e-b005-51a9742abb19/6fda89ec-aee1-4c1e-b005-51a9742abb19.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.554571] env[62383]: DEBUG nova.network.neutron [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.556341] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b055363a-f0ed-4352-b073-941bc636ea22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.576074] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 823.576074] env[62383]: value = "task-2451684" [ 823.576074] env[62383]: _type = "Task" [ 823.576074] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.583929] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451684, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.662994] env[62383]: DEBUG oslo_vmware.api [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451683, 'name': PowerOffVM_Task, 'duration_secs': 0.211137} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.663285] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 823.663456] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 823.663711] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dfe286d7-f750-42ff-9386-a3800a76c519 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.707532] env[62383]: DEBUG nova.network.neutron [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Updating instance_info_cache with network_info: [{"id": "845110d3-620c-4852-8aab-e6907d5b3af2", "address": "fa:16:3e:0d:f6:13", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap845110d3-62", "ovs_interfaceid": "845110d3-620c-4852-8aab-e6907d5b3af2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.728521] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 823.728760] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 
tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 823.728928] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Deleting the datastore file [datastore1] 12e6baef-0614-4a12-b958-30b0f56fe486 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 823.729175] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3c6716d-c859-42ac-b7da-677aea229694 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.734755] env[62383]: DEBUG oslo_vmware.api [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for the task: (returnval){ [ 823.734755] env[62383]: value = "task-2451686" [ 823.734755] env[62383]: _type = "Task" [ 823.734755] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.742467] env[62383]: DEBUG oslo_vmware.api [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451686, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.859582] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca109816-22d9-4613-9a8a-05affa2d3cb6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.864141] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.603s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.864632] env[62383]: DEBUG nova.compute.manager [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 823.867165] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 27.329s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.885642] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e4f7e1-3f8f-4f54-a94a-ee37360b588f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.893438] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance '93234e99-268f-491e-96bd-a77f4c9f164b' progress to 83 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 823.899846] env[62383]: DEBUG nova.compute.manager [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 823.900062] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.901522] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f6c059-24eb-4df5-80b5-a261633f2071 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.909327] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.909541] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c54ae16-d817-4573-b0d1-b4f744dbd316 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.916447] env[62383]: DEBUG oslo_vmware.api [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 823.916447] env[62383]: value = "task-2451687" [ 823.916447] env[62383]: _type = "Task" [ 823.916447] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.924320] env[62383]: DEBUG oslo_vmware.api [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451687, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.085931] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451684, 'name': ReconfigVM_Task, 'duration_secs': 0.350627} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.086285] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 6fda89ec-aee1-4c1e-b005-51a9742abb19/6fda89ec-aee1-4c1e-b005-51a9742abb19.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.086942] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ef5a991-e5da-4a9a-bc4c-8073e332cae4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.093417] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 824.093417] env[62383]: value = "task-2451688" [ 824.093417] env[62383]: _type = "Task" [ 824.093417] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.102311] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451688, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.209809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.210131] env[62383]: DEBUG nova.compute.manager [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Instance network_info: |[{"id": "845110d3-620c-4852-8aab-e6907d5b3af2", "address": "fa:16:3e:0d:f6:13", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap845110d3-62", "ovs_interfaceid": "845110d3-620c-4852-8aab-e6907d5b3af2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 824.210692] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:f6:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '845110d3-620c-4852-8aab-e6907d5b3af2', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 824.218172] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Creating folder: Project (aead8ea1d1de4d0d8d8c07dec519d8b4). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 824.218462] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d3319e42-a433-427f-9507-45be642a5453 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.229251] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Created folder: Project (aead8ea1d1de4d0d8d8c07dec519d8b4) in parent group-v496304. [ 824.229438] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Creating folder: Instances. Parent ref: group-v496486. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 824.229674] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69e03283-7d53-478d-bc14-4e6700c01808 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.244526] env[62383]: DEBUG oslo_vmware.api [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Task: {'id': task-2451686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131016} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.245700] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.245887] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 824.246102] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.246287] env[62383]: INFO nova.compute.manager [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Took 1.11 seconds to destroy the instance on the hypervisor. [ 824.246529] env[62383]: DEBUG oslo.service.loopingcall [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.246762] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Created folder: Instances in parent group-v496486. [ 824.246967] env[62383]: DEBUG oslo.service.loopingcall [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.247172] env[62383]: DEBUG nova.compute.manager [-] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 824.247268] env[62383]: DEBUG nova.network.neutron [-] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.248939] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 824.249217] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8f67d05-09b2-43da-9bb0-9b202b563792 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.267884] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 824.267884] env[62383]: value = "task-2451691" [ 824.267884] env[62383]: _type = "Task" [ 824.267884] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.274956] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451691, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.371477] env[62383]: DEBUG nova.compute.utils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 824.375030] env[62383]: DEBUG nova.objects.instance [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lazy-loading 'migration_context' on Instance uuid 9604eadf-a027-46dd-989b-0d4b752f883a {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 824.375030] env[62383]: DEBUG nova.compute.manager [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 824.375030] env[62383]: DEBUG nova.network.neutron [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 824.400184] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.400730] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1c76f06-3c03-4cd2-96e7-7a230a3ae993 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.407151] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 824.407151] env[62383]: value = "task-2451692" [ 824.407151] env[62383]: _type = "Task" [ 824.407151] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.417582] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451692, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.427365] env[62383]: DEBUG oslo_vmware.api [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451687, 'name': PowerOffVM_Task, 'duration_secs': 0.349576} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.427607] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.427780] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 824.429161] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ea86602-f562-444f-89b9-121e9095e4b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.447349] env[62383]: DEBUG nova.policy [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db7e9998210e485fa855f0375f63ad55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35016a724e7e4fa2b0fc19396d8e736b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 824.497867] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 824.498110] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 824.499577] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Deleting the datastore file [datastore1] 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 824.499577] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-850665bc-0edf-4a74-9020-92d358519edf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.505569] env[62383]: DEBUG oslo_vmware.api [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for the task: (returnval){ [ 824.505569] env[62383]: value = 
"task-2451694" [ 824.505569] env[62383]: _type = "Task" [ 824.505569] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.516934] env[62383]: DEBUG oslo_vmware.api [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451694, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.607019] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451688, 'name': Rename_Task, 'duration_secs': 0.185087} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.607019] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.607019] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edac5f6c-01c8-4f0c-8f17-0fc0395d2888 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.612075] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 824.612075] env[62383]: value = "task-2451695" [ 824.612075] env[62383]: _type = "Task" [ 824.612075] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.625433] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451695, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.740999] env[62383]: DEBUG nova.compute.manager [req-26359ddc-5f33-4e75-a71a-e0db25de182f req-e21bf5ba-8d97-481c-a097-01058a0f0848 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Received event network-vif-deleted-2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 824.741262] env[62383]: INFO nova.compute.manager [req-26359ddc-5f33-4e75-a71a-e0db25de182f req-e21bf5ba-8d97-481c-a097-01058a0f0848 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Neutron deleted interface 2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c; detaching it from the instance and deleting it from the info cache [ 824.741754] env[62383]: DEBUG nova.network.neutron [req-26359ddc-5f33-4e75-a71a-e0db25de182f req-e21bf5ba-8d97-481c-a097-01058a0f0848 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.781770] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451691, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.812816] env[62383]: DEBUG nova.network.neutron [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Successfully created port: 59b6508c-9775-4b70-8003-690acbbb3e9b {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.875651] env[62383]: DEBUG nova.compute.manager [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 825.601420] env[62383]: DEBUG nova.network.neutron [-] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.612851] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2597caba-0955-4fbb-b123-a6272ad07409 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.621978] env[62383]: DEBUG oslo_vmware.api [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451692, 'name': PowerOnVM_Task, 'duration_secs': 0.637006} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.630536] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.630733] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f994058c-3783-411e-bb8f-7d4925ce96b5 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance '93234e99-268f-491e-96bd-a77f4c9f164b' progress to 100 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 825.634257] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451691, 'name': CreateVM_Task, 'duration_secs': 0.547853} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.634736] env[62383]: DEBUG oslo_vmware.api [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451695, 'name': PowerOnVM_Task, 'duration_secs': 0.915919} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.634948] env[62383]: DEBUG oslo_vmware.api [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Task: {'id': task-2451694, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152912} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.640045] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8acead9-9d7f-478d-a7dd-0936356a62f5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.650382] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 825.651365] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.651365] env[62383]: INFO nova.compute.manager [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Took 6.99 seconds to spawn the instance on the hypervisor. 
[ 825.651365] env[62383]: DEBUG nova.compute.manager [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 825.651365] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 825.651365] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 825.651909] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 825.651909] env[62383]: INFO nova.compute.manager [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Took 1.75 seconds to destroy the instance on the hypervisor. [ 825.651909] env[62383]: DEBUG oslo.service.loopingcall [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 825.653210] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.653369] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.653899] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 825.654418] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8205a80d-ed0b-4ca2-a644-35135d44eb94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.657068] env[62383]: DEBUG nova.compute.manager [-] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 825.657162] env[62383]: DEBUG nova.network.neutron [-] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 825.658975] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-546d5573-7f6b-4b26-9ba0-13685bdbda9e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.670670] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 825.670670] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ba9e66-a0aa-a3c2-19ee-509a2044725b" [ 825.670670] env[62383]: _type = "Task" [ 825.670670] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.695598] env[62383]: DEBUG nova.compute.manager [req-26359ddc-5f33-4e75-a71a-e0db25de182f req-e21bf5ba-8d97-481c-a097-01058a0f0848 service nova] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Detach interface failed, port_id=2422df8e-cbd8-4a5d-94c7-c19d1b28cb6c, reason: Instance 12e6baef-0614-4a12-b958-30b0f56fe486 could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 825.702415] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ba9e66-a0aa-a3c2-19ee-509a2044725b, 'name': SearchDatastore_Task, 'duration_secs': 0.010323} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.702815] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.706017] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 825.706017] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.706017] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.706017] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 825.706017] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5751a8f-4fa9-465b-a262-9018b15c858f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.716849] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 825.717062] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 825.718310] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8dcb82e-bacd-4784-9abb-9608e4b1c4d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.726800] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 825.726800] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522140fd-240b-86ac-9e76-c9617369f40e" [ 825.726800] env[62383]: _type = "Task" [ 825.726800] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.736549] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522140fd-240b-86ac-9e76-c9617369f40e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.107238] env[62383]: INFO nova.compute.manager [-] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Took 1.86 seconds to deallocate network for instance. [ 826.124580] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5095610-171b-49b6-b9ec-7e8f72c09081 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.132092] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b56ec9-0463-4a5f-a839-e15f90708012 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.165268] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bb5f12-b091-46e6-9589-9da89813714c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.172898] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd5a56f-d18b-40a1-ac55-4e3debd8f46f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.190956] env[62383]: DEBUG nova.compute.provider_tree [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.192450] env[62383]: INFO nova.compute.manager [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Took 38.65 seconds to build instance. 
[ 826.236595] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522140fd-240b-86ac-9e76-c9617369f40e, 'name': SearchDatastore_Task, 'duration_secs': 0.009644} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.237404] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a10ae2a-683e-401c-af66-714d1f40225e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.243141] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 826.243141] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52fd0c7f-cf24-8979-83d1-1edc86f00604" [ 826.243141] env[62383]: _type = "Task" [ 826.243141] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.251883] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fd0c7f-cf24-8979-83d1-1edc86f00604, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.270812] env[62383]: DEBUG nova.network.neutron [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Successfully updated port: 59b6508c-9775-4b70-8003-690acbbb3e9b {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 826.438017] env[62383]: DEBUG nova.network.neutron [-] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.608920] env[62383]: DEBUG nova.compute.manager [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 826.611667] env[62383]: DEBUG oslo_concurrency.lockutils [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.634110] env[62383]: DEBUG nova.virt.hardware [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 826.634353] env[62383]: DEBUG nova.virt.hardware [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 826.634509] env[62383]: DEBUG nova.virt.hardware [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 826.634688] env[62383]: DEBUG nova.virt.hardware [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 826.634832] env[62383]: DEBUG nova.virt.hardware [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 826.634979] env[62383]: DEBUG nova.virt.hardware [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 826.635264] env[62383]: DEBUG nova.virt.hardware [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 826.635425] env[62383]: DEBUG nova.virt.hardware [None 
req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 826.635590] env[62383]: DEBUG nova.virt.hardware [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 826.635835] env[62383]: DEBUG nova.virt.hardware [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 826.636042] env[62383]: DEBUG nova.virt.hardware [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 826.636881] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5621ef01-6f1a-4b36-b4e3-82c4fa8d295f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.644960] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01c03fe-bf65-43b2-a4f4-4255b94e488e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.698133] env[62383]: DEBUG nova.scheduler.client.report [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 826.707026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-779fdc9e-a058-47fb-abf7-7883c3a6388e tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "6fda89ec-aee1-4c1e-b005-51a9742abb19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.176s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.754728] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fd0c7f-cf24-8979-83d1-1edc86f00604, 'name': SearchDatastore_Task, 'duration_secs': 0.0087} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.755105] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.755440] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 1b025655-acad-4b70-9e1a-489683cafb7e/1b025655-acad-4b70-9e1a-489683cafb7e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 826.755975] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18b61aa3-1028-4b68-bf6b-3174e2f461ab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.762555] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 826.762555] env[62383]: value = "task-2451696" [ 826.762555] env[62383]: _type = "Task" [ 826.762555] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.770331] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451696, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.773031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "refresh_cache-bc37e114-cf55-408b-9841-05eaf411b4f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.773274] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "refresh_cache-bc37e114-cf55-408b-9841-05eaf411b4f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.773479] env[62383]: DEBUG nova.network.neutron [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.837038] env[62383]: DEBUG nova.compute.manager [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Received event network-vif-deleted-c23968b2-dbec-433d-8bcc-80644a89ec08 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 826.837251] env[62383]: DEBUG nova.compute.manager [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Received event network-vif-plugged-59b6508c-9775-4b70-8003-690acbbb3e9b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 826.837441] env[62383]: DEBUG oslo_concurrency.lockutils [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] Acquiring lock "bc37e114-cf55-408b-9841-05eaf411b4f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.838137] env[62383]: DEBUG oslo_concurrency.lockutils [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] Lock "bc37e114-cf55-408b-9841-05eaf411b4f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.838566] env[62383]: DEBUG oslo_concurrency.lockutils [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] Lock "bc37e114-cf55-408b-9841-05eaf411b4f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.838566] env[62383]: DEBUG nova.compute.manager [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] No waiting events found dispatching network-vif-plugged-59b6508c-9775-4b70-8003-690acbbb3e9b {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 826.838802] env[62383]: WARNING nova.compute.manager 
[req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Received unexpected event network-vif-plugged-59b6508c-9775-4b70-8003-690acbbb3e9b for instance with vm_state building and task_state spawning. [ 826.839027] env[62383]: DEBUG nova.compute.manager [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Received event network-changed-59b6508c-9775-4b70-8003-690acbbb3e9b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 826.839233] env[62383]: DEBUG nova.compute.manager [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Refreshing instance network info cache due to event network-changed-59b6508c-9775-4b70-8003-690acbbb3e9b. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 826.839493] env[62383]: DEBUG oslo_concurrency.lockutils [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] Acquiring lock "refresh_cache-bc37e114-cf55-408b-9841-05eaf411b4f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.941330] env[62383]: INFO nova.compute.manager [-] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Took 1.28 seconds to deallocate network for instance. [ 826.966432] env[62383]: DEBUG nova.compute.manager [req-05d8f1ee-1733-49f1-9191-384caa321979 req-57030b3c-d526-4742-8963-0b85fab8b34c service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Received event network-changed-df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 826.966637] env[62383]: DEBUG nova.compute.manager [req-05d8f1ee-1733-49f1-9191-384caa321979 req-57030b3c-d526-4742-8963-0b85fab8b34c service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Refreshing instance network info cache due to event network-changed-df613873-27c1-4191-ad83-7321eb499e0b. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 826.966896] env[62383]: DEBUG oslo_concurrency.lockutils [req-05d8f1ee-1733-49f1-9191-384caa321979 req-57030b3c-d526-4742-8963-0b85fab8b34c service nova] Acquiring lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.967550] env[62383]: DEBUG oslo_concurrency.lockutils [req-05d8f1ee-1733-49f1-9191-384caa321979 req-57030b3c-d526-4742-8963-0b85fab8b34c service nova] Acquired lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.967796] env[62383]: DEBUG nova.network.neutron [req-05d8f1ee-1733-49f1-9191-384caa321979 req-57030b3c-d526-4742-8963-0b85fab8b34c service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Refreshing network info cache for port df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.274401] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451696, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451843} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.274712] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 1b025655-acad-4b70-9e1a-489683cafb7e/1b025655-acad-4b70-9e1a-489683cafb7e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 827.274932] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 827.277070] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-956bbc1b-3546-4ab0-bfdb-93b72ff1996f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.283977] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 827.283977] env[62383]: value = "task-2451697" [ 827.283977] env[62383]: _type = "Task" [ 827.283977] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.292951] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451697, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.307259] env[62383]: DEBUG nova.network.neutron [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.450657] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.452214] env[62383]: DEBUG nova.network.neutron [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Updating instance_info_cache with network_info: [{"id": "59b6508c-9775-4b70-8003-690acbbb3e9b", "address": "fa:16:3e:4b:59:5e", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59b6508c-97", "ovs_interfaceid": "59b6508c-9775-4b70-8003-690acbbb3e9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.660663] env[62383]: DEBUG nova.network.neutron [req-05d8f1ee-1733-49f1-9191-384caa321979 req-57030b3c-d526-4742-8963-0b85fab8b34c service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Updated VIF entry in instance network info cache for port df613873-27c1-4191-ad83-7321eb499e0b. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 827.661101] env[62383]: DEBUG nova.network.neutron [req-05d8f1ee-1733-49f1-9191-384caa321979 req-57030b3c-d526-4742-8963-0b85fab8b34c service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Updating instance_info_cache with network_info: [{"id": "df613873-27c1-4191-ad83-7321eb499e0b", "address": "fa:16:3e:8b:10:34", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf613873-27", "ovs_interfaceid": "df613873-27c1-4191-ad83-7321eb499e0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.687883] env[62383]: DEBUG oslo_concurrency.lockutils [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "6fda89ec-aee1-4c1e-b005-51a9742abb19" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.688211] env[62383]: DEBUG oslo_concurrency.lockutils [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "6fda89ec-aee1-4c1e-b005-51a9742abb19" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.688463] env[62383]: INFO nova.compute.manager [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Rebooting instance [ 827.711453] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.844s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.717196] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 27.032s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.798936] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451697, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063899} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.799226] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.800234] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaefd939-5ef6-467f-84de-df0565dff90a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.824592] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 1b025655-acad-4b70-9e1a-489683cafb7e/1b025655-acad-4b70-9e1a-489683cafb7e.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.826795] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd302779-cea3-4bf5-bd79-b62f42e15c13 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.850847] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 827.850847] env[62383]: value = "task-2451698" [ 827.850847] env[62383]: _type = "Task" [ 827.850847] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.859680] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451698, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.954841] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "refresh_cache-bc37e114-cf55-408b-9841-05eaf411b4f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.955213] env[62383]: DEBUG nova.compute.manager [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Instance network_info: |[{"id": "59b6508c-9775-4b70-8003-690acbbb3e9b", "address": "fa:16:3e:4b:59:5e", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59b6508c-97", "ovs_interfaceid": "59b6508c-9775-4b70-8003-690acbbb3e9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 827.955524] env[62383]: DEBUG oslo_concurrency.lockutils [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] Acquired lock "refresh_cache-bc37e114-cf55-408b-9841-05eaf411b4f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.955704] env[62383]: DEBUG nova.network.neutron [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Refreshing network info cache for port 59b6508c-9775-4b70-8003-690acbbb3e9b {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.956929] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:59:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59b6508c-9775-4b70-8003-690acbbb3e9b', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 827.965097] env[62383]: DEBUG oslo.service.loopingcall [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.965540] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 827.965768] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9decb2a8-86f5-428c-ae4a-90c152a4fda8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.987520] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 827.987520] env[62383]: value = "task-2451699" [ 827.987520] env[62383]: _type = "Task" [ 827.987520] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.995448] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451699, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.163771] env[62383]: DEBUG oslo_concurrency.lockutils [req-05d8f1ee-1733-49f1-9191-384caa321979 req-57030b3c-d526-4742-8963-0b85fab8b34c service nova] Releasing lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.213236] env[62383]: DEBUG oslo_concurrency.lockutils [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.213430] env[62383]: DEBUG oslo_concurrency.lockutils [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquired lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.213606] env[62383]: DEBUG nova.network.neutron [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.346990] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "93234e99-268f-491e-96bd-a77f4c9f164b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.347376] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "93234e99-268f-491e-96bd-a77f4c9f164b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.347619] env[62383]: 
DEBUG nova.compute.manager [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Going to confirm migration 4 {{(pid=62383) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 828.364368] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451698, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.499380] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451699, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.671142] env[62383]: DEBUG nova.network.neutron [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Updated VIF entry in instance network info cache for port 59b6508c-9775-4b70-8003-690acbbb3e9b. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 828.671663] env[62383]: DEBUG nova.network.neutron [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Updating instance_info_cache with network_info: [{"id": "59b6508c-9775-4b70-8003-690acbbb3e9b", "address": "fa:16:3e:4b:59:5e", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59b6508c-97", "ovs_interfaceid": "59b6508c-9775-4b70-8003-690acbbb3e9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.736761] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Applying migration context for instance 8a165d96-f503-4bc5-bff4-e6a85201e137 as it has an incoming, in-progress migration de2bbf7b-fb36-4da7-9a39-76edd8e5241b. Migration status is confirming {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 828.737069] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Applying migration context for instance 93234e99-268f-491e-96bd-a77f4c9f164b as it has an incoming, in-progress migration 88f6ac26-e38f-4ff9-9ba8-0b8c72f3e3ab. 
Migration status is confirming {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 828.740091] env[62383]: INFO nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating resource usage from migration de2bbf7b-fb36-4da7-9a39-76edd8e5241b [ 828.740466] env[62383]: INFO nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating resource usage from migration 88f6ac26-e38f-4ff9-9ba8-0b8c72f3e3ab [ 828.769422] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1a740010-ddd0-4df6-8ae6-02f1ed50137f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.769422] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2c93bdf1-aaf4-4e40-898a-634dc00d05e6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.769422] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance da16da02-25ab-46f9-9070-9fdde0b3a75e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.769422] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 80821717-f961-49c7-8b79-c152edfdfb94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.769662] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a8d56b8e-fa11-4844-ab65-a2e5d24b1e07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.769662] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2337e9a2-736c-4d58-ac2e-04c8ad813be4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 828.769662] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 152567ba-f24c-4674-b06e-98c76a3da324 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.769662] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance c2fee51e-3cc9-421c-bfe5-b324a5b14197 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.769817] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.770272] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.770602] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 828.770961] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.771272] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 67f05a2b-f323-4e4a-ac13-7f4745593be0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.773513] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8bd05dac-7aa2-44c5-8752-6045c01d213d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.773513] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1e367665-1d4b-4686-ac79-c946423c1762 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.773513] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance d0311c29-e1ed-446f-a52b-1687b9561740 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.773513] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 4cd9c7be-c5f4-460b-a9e2-e8f778076947 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.773842] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance e41f5c22-44e0-4de8-a4d0-865fe2c6febd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.773842] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 5ef22e87-f73c-47ba-b925-2bd2effe74eb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 828.773842] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a27fcace-4fb3-48fb-946d-b8057f6ee601 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 828.773842] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a68610a6-f684-4cc9-8dd4-8b90d2d379da is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 828.773842] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 282f2c94-7a63-4eef-aa80-7d67d0a0972a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 828.868238] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451698, 'name': ReconfigVM_Task, 'duration_secs': 1.000511} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.868795] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 1b025655-acad-4b70-9e1a-489683cafb7e/1b025655-acad-4b70-9e1a-489683cafb7e.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 828.869624] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ecb9447-9cff-4484-b63e-a96e0fac442f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.875529] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 828.875529] env[62383]: value = "task-2451700" [ 828.875529] env[62383]: _type = "Task" [ 828.875529] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.884774] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451700, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.955914] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.956196] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.956408] env[62383]: DEBUG nova.network.neutron [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.956642] env[62383]: DEBUG nova.objects.instance [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lazy-loading 'info_cache' on Instance uuid 93234e99-268f-491e-96bd-a77f4c9f164b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 828.978256] env[62383]: DEBUG nova.network.neutron [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Updating instance_info_cache with network_info: [{"id": "df613873-27c1-4191-ad83-7321eb499e0b", 
"address": "fa:16:3e:8b:10:34", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf613873-27", "ovs_interfaceid": "df613873-27c1-4191-ad83-7321eb499e0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.999107] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451699, 'name': CreateVM_Task, 'duration_secs': 0.685536} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.999288] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 829.000071] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.000258] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.000605] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 829.000888] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c0f1429-56e2-4ed2-89db-a797aa0e77c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.007125] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 829.007125] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52307563-d6d0-fed6-c135-3ddacafb1c81" [ 829.007125] env[62383]: _type = "Task" [ 829.007125] env[62383]: } 
to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.014488] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52307563-d6d0-fed6-c135-3ddacafb1c81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.174066] env[62383]: DEBUG oslo_concurrency.lockutils [req-466aa1cd-424d-4f45-baf1-2ad0328961f7 req-31196409-f1d0-4e74-a31c-fef1ba21c4d4 service nova] Releasing lock "refresh_cache-bc37e114-cf55-408b-9841-05eaf411b4f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.273589] env[62383]: INFO nova.compute.manager [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Swapping old allocation on dict_keys(['60615f54-0557-436e-a486-87505bffb4c7']) held by migration e6a5a341-cb15-4a73-bdeb-a5f56a64c08a for instance [ 829.279425] env[62383]: INFO nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance e6a5a341-cb15-4a73-bdeb-a5f56a64c08a has allocations against this compute host but is not found in the database. [ 829.279425] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 9604eadf-a027-46dd-989b-0d4b752f883a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 829.279425] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Migration de2bbf7b-fb36-4da7-9a39-76edd8e5241b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 829.279674] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8a165d96-f503-4bc5-bff4-e6a85201e137 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 829.280334] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2f028680-8db4-474a-8f24-880c4702877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 829.280334] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 12e6baef-0614-4a12-b958-30b0f56fe486 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 829.280334] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Migration 88f6ac26-e38f-4ff9-9ba8-0b8c72f3e3ab is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 829.280334] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 93234e99-268f-491e-96bd-a77f4c9f164b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 829.280588] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 6fda89ec-aee1-4c1e-b005-51a9742abb19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 829.280588] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1b025655-acad-4b70-9e1a-489683cafb7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 829.280835] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance bc37e114-cf55-408b-9841-05eaf411b4f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 829.299757] env[62383]: DEBUG nova.scheduler.client.report [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Overwriting current allocation {'allocations': {'60615f54-0557-436e-a486-87505bffb4c7': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 93}}, 'project_id': 'a8d1b45dd8d74bf9a01173d57990d06b', 'user_id': '3d374b5a04f94016b0f5aa198b02b40b', 'consumer_generation': 1} on consumer 9604eadf-a027-46dd-989b-0d4b752f883a {{(pid=62383) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 829.380437] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.380678] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquired lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.380824] env[62383]: DEBUG nova.network.neutron [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.387373] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451700, 'name': Rename_Task, 'duration_secs': 0.135743} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.387635] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 829.387868] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fbc237a-06bb-4cee-81ce-d756fb8669d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.393918] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 829.393918] env[62383]: value = "task-2451701" [ 829.393918] env[62383]: _type = "Task" [ 829.393918] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.402823] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451701, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.481033] env[62383]: DEBUG oslo_concurrency.lockutils [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Releasing lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.516883] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52307563-d6d0-fed6-c135-3ddacafb1c81, 'name': SearchDatastore_Task, 'duration_secs': 0.047321} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.517061] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.517287] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.517521] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.517690] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.517848] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.518126] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d62570d-2a6c-4f71-852b-c4a5565ef33a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.526608] env[62383]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.526814] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 829.527561] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe3f60fd-9f14-4a84-944d-9953765b126e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.532560] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 829.532560] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52309209-28fc-c0ed-96cf-cf0300e38c28" [ 829.532560] env[62383]: _type = "Task" [ 829.532560] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.540694] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52309209-28fc-c0ed-96cf-cf0300e38c28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.783753] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2eba2920-7912-475b-a198-890743aa5255 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 829.903508] env[62383]: DEBUG oslo_vmware.api [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451701, 'name': PowerOnVM_Task, 'duration_secs': 0.465797} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.903733] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 829.903947] env[62383]: INFO nova.compute.manager [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Took 8.65 seconds to spawn the instance on the hypervisor. 
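Every "Invoking <Object>.<Method>_Task ... Waiting for the task ... progress is N% ... completed successfully" sequence above (CreateVM_Task, CopyVirtualDisk_Task, Rename_Task, PowerOnVM_Task, and so on) comes from the same oslo.vmware usage pattern: call a vSphere *_Task method through the API session, then block in wait_for_task(), which polls the task and logs progress until it reaches a terminal state. A minimal sketch of that pattern is below; the vCenter host, credentials, and managed-object id are placeholders, and the constructor arguments follow oslo.vmware's documented signature rather than anything recorded in this log.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder endpoint and credentials; creating the session logs in to vCenter.
session = vmware_api.VMwareAPISession(
    "vcenter.example.org",
    "administrator@vsphere.local",
    "secret",
    api_retry_count=10,
    task_poll_interval=0.5)  # seconds between the progress polls seen above

# Build a reference to an existing VM (the id is a placeholder) and power it on.
vm_ref = vim_util.get_moref("vm-123", "VirtualMachine")
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)

# wait_for_task() polls the task, logging its progress, and raises if the task
# ends in an error state; on success it returns the final task info.
task_info = session.wait_for_task(task)
print(task_info.state)

The task_poll_interval is what produces the repeated "progress is N%" lines: each poll logs the current progress until the task completes, at which point the "completed successfully" record with duration_secs is emitted.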
[ 829.904139] env[62383]: DEBUG nova.compute.manager [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.904855] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751a944d-e660-4c5b-9ba8-5f188998ea2b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.984492] env[62383]: DEBUG nova.compute.manager [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.985421] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155a69cf-0727-4f69-944f-124be13667c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.042574] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52309209-28fc-c0ed-96cf-cf0300e38c28, 'name': SearchDatastore_Task, 'duration_secs': 0.010882} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.045679] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6914a732-0c6c-4bbf-b9ba-d4e0ae20405c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.050815] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 830.050815] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52892340-f0e8-6595-00a6-3309d101370f" [ 830.050815] env[62383]: _type = "Task" [ 830.050815] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.058188] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52892340-f0e8-6595-00a6-3309d101370f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.163699] env[62383]: DEBUG nova.network.neutron [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance_info_cache with network_info: [{"id": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "address": "fa:16:3e:9b:eb:a7", "network": {"id": "ccf6b739-15a2-40e3-908e-e81e671581e6", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.24", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "4c10acdaa3604265b23d83059e3de218", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap796c3e3e-48", "ovs_interfaceid": "796c3e3e-48f2-4d7f-8f7d-974f792c4426", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.223415] env[62383]: DEBUG nova.network.neutron [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance_info_cache with network_info: [{"id": "79458cb2-668a-4c04-882f-c00f465ccd9d", "address": "fa:16:3e:06:eb:ec", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79458cb2-66", "ovs_interfaceid": "79458cb2-668a-4c04-882f-c00f465ccd9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.286762] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a04a6a53-cca8-4e15-b840-cb1394e5b188 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.425182] env[62383]: INFO nova.compute.manager [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Took 38.88 seconds to build instance. [ 830.561611] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52892340-f0e8-6595-00a6-3309d101370f, 'name': SearchDatastore_Task, 'duration_secs': 0.009915} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.561937] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.562258] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] bc37e114-cf55-408b-9841-05eaf411b4f5/bc37e114-cf55-408b-9841-05eaf411b4f5.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 830.562541] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d01f5023-6d84-48e7-99f6-1f9af4814aaa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.568949] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 830.568949] env[62383]: value = "task-2451702" [ 830.568949] env[62383]: _type = "Task" [ 830.568949] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.576559] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451702, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.666215] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Releasing lock "refresh_cache-9604eadf-a027-46dd-989b-0d4b752f883a" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.666737] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 830.667072] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8bd919c6-bb06-4bd1-b1eb-8cbd7a054221 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.674305] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 830.674305] env[62383]: value = "task-2451703" [ 830.674305] env[62383]: _type = "Task" [ 830.674305] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.689750] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451703, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.726687] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-93234e99-268f-491e-96bd-a77f4c9f164b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.727050] env[62383]: DEBUG nova.objects.instance [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lazy-loading 'migration_context' on Instance uuid 93234e99-268f-491e-96bd-a77f4c9f164b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 830.790294] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 7740a70f-3c95-49aa-b3ec-0e0effd3efcc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.790634] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 26 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 830.790780] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=5760MB phys_disk=200GB used_disk=26GB total_vcpus=48 used_vcpus=26 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 830.926652] env[62383]: DEBUG oslo_concurrency.lockutils [None req-09262cc3-7b93-4eb6-a6d2-d3e84c3a3ecc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "1b025655-acad-4b70-9e1a-489683cafb7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.392s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.002800] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380ed2df-9201-4845-90af-90a2c09ba03e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.015531] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Doing hard reboot of VM {{(pid=62383) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 831.015831] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-8d180c16-d72f-45ac-a5e1-18b926b48553 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.025554] env[62383]: DEBUG oslo_vmware.api [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 831.025554] env[62383]: value = "task-2451704" [ 831.025554] env[62383]: _type = "Task" [ 831.025554] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.033618] env[62383]: DEBUG oslo_vmware.api [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451704, 'name': ResetVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.079913] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451702, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483016} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.080287] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] bc37e114-cf55-408b-9841-05eaf411b4f5/bc37e114-cf55-408b-9841-05eaf411b4f5.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 831.080527] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 831.081228] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-403c6ed5-ab63-461b-ae22-7f2b86ec1a81 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.087702] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 831.087702] env[62383]: value = "task-2451705" [ 831.087702] env[62383]: _type = "Task" [ 831.087702] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.098608] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451705, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.129111] env[62383]: DEBUG nova.compute.manager [req-84441612-bbfe-482c-8a41-3796df87e0ce req-9edd2083-7133-405f-9a87-4a30f097ff27 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Received event network-changed-845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 831.129288] env[62383]: DEBUG nova.compute.manager [req-84441612-bbfe-482c-8a41-3796df87e0ce req-9edd2083-7133-405f-9a87-4a30f097ff27 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Refreshing instance network info cache due to event network-changed-845110d3-620c-4852-8aab-e6907d5b3af2. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 831.129505] env[62383]: DEBUG oslo_concurrency.lockutils [req-84441612-bbfe-482c-8a41-3796df87e0ce req-9edd2083-7133-405f-9a87-4a30f097ff27 service nova] Acquiring lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.129646] env[62383]: DEBUG oslo_concurrency.lockutils [req-84441612-bbfe-482c-8a41-3796df87e0ce req-9edd2083-7133-405f-9a87-4a30f097ff27 service nova] Acquired lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.129835] env[62383]: DEBUG nova.network.neutron [req-84441612-bbfe-482c-8a41-3796df87e0ce req-9edd2083-7133-405f-9a87-4a30f097ff27 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Refreshing network info cache for port 845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.184325] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451703, 'name': PowerOffVM_Task, 'duration_secs': 0.246912} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.184592] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 831.185403] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:26:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='4fbcd04d-cb08-4e45-b5c9-0176dc87583e',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1902383657',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.185617] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.185775] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.185958] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 
tempest-MigrationsAdminTest-41819132-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.186120] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.186269] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.186468] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.186685] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.186784] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.186946] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.187129] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.194431] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ef0fb86-0597-44de-b565-6b06319c8dfa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.209705] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 831.209705] env[62383]: value = "task-2451706" [ 831.209705] env[62383]: _type = "Task" [ 831.209705] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.220022] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451706, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.230157] env[62383]: DEBUG nova.objects.base [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Object Instance<93234e99-268f-491e-96bd-a77f4c9f164b> lazy-loaded attributes: info_cache,migration_context {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 831.231275] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c87e6c-8d6d-4fac-bba9-98d68532248d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.252948] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25644968-1b4d-43af-b1ab-bc1d14dd84da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.259210] env[62383]: DEBUG oslo_vmware.api [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 831.259210] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5286e97d-9773-815e-54bd-8d4bda6ee98b" [ 831.259210] env[62383]: _type = "Task" [ 831.259210] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.266640] env[62383]: DEBUG oslo_vmware.api [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5286e97d-9773-815e-54bd-8d4bda6ee98b, 'name': SearchDatastore_Task, 'duration_secs': 0.006154} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.269367] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.362604] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8b0805-47fb-4c7d-ad61-d2e619534b53 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.369738] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91bd3d3-9a83-4ff4-b534-8f3e46ae24ec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.402128] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b10f06-59b6-4873-a079-476bf99e109b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.410783] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d8bb09-c0ee-4f23-b979-11241cc72f95 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.424856] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.536573] env[62383]: DEBUG oslo_vmware.api [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451704, 'name': ResetVM_Task, 'duration_secs': 0.12847} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.536925] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Did hard reboot of VM {{(pid=62383) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 831.537169] env[62383]: DEBUG nova.compute.manager [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 831.537953] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba31be10-c661-4ebd-8a7b-01109ec626a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.596662] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451705, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065921} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.596974] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 831.597774] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53490907-819c-4bd9-8dcb-f683db4a69a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.618784] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] bc37e114-cf55-408b-9841-05eaf411b4f5/bc37e114-cf55-408b-9841-05eaf411b4f5.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.619316] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ebda5a7-6c80-4d2a-bf11-87cfb57de21b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.637806] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 831.637806] env[62383]: value = "task-2451707" [ 831.637806] env[62383]: _type = "Task" [ 831.637806] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.645564] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451707, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.720151] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451706, 'name': ReconfigVM_Task, 'duration_secs': 0.152589} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.721127] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2436be9d-d189-406a-abb9-696d0181b330 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.740707] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:26:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='4fbcd04d-cb08-4e45-b5c9-0176dc87583e',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1902383657',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.740707] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.740707] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.742887] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.742887] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.742887] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.742887] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.742887] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.743114] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.743114] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.743114] env[62383]: DEBUG nova.virt.hardware [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.745163] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97a7bb56-a5e6-4878-996b-03fcc2ba0dfa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.751514] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 831.751514] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525dc2d4-6950-7d42-bda4-e051b04d5a19" [ 831.751514] env[62383]: _type = "Task" [ 831.751514] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.759262] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525dc2d4-6950-7d42-bda4-e051b04d5a19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.869053] env[62383]: DEBUG nova.network.neutron [req-84441612-bbfe-482c-8a41-3796df87e0ce req-9edd2083-7133-405f-9a87-4a30f097ff27 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Updated VIF entry in instance network info cache for port 845110d3-620c-4852-8aab-e6907d5b3af2. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 831.869462] env[62383]: DEBUG nova.network.neutron [req-84441612-bbfe-482c-8a41-3796df87e0ce req-9edd2083-7133-405f-9a87-4a30f097ff27 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Updating instance_info_cache with network_info: [{"id": "845110d3-620c-4852-8aab-e6907d5b3af2", "address": "fa:16:3e:0d:f6:13", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap845110d3-62", "ovs_interfaceid": "845110d3-620c-4852-8aab-e6907d5b3af2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.928228] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 832.054907] env[62383]: DEBUG oslo_concurrency.lockutils [None req-acf38a40-6a52-4b40-b5d8-1a62e90b042c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "6fda89ec-aee1-4c1e-b005-51a9742abb19" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.366s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.148726] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.263625] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525dc2d4-6950-7d42-bda4-e051b04d5a19, 'name': SearchDatastore_Task, 'duration_secs': 0.017757} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.269806] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 832.270174] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2185bcec-886c-47ab-a9b2-0385f4e04833 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.288945] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 832.288945] env[62383]: value = "task-2451708" [ 832.288945] env[62383]: _type = "Task" [ 832.288945] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.299141] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451708, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.373222] env[62383]: DEBUG oslo_concurrency.lockutils [req-84441612-bbfe-482c-8a41-3796df87e0ce req-9edd2083-7133-405f-9a87-4a30f097ff27 service nova] Releasing lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.434104] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 832.434104] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.717s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.434337] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.966s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.435885] env[62383]: INFO nova.compute.claims [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.439242] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62383) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.443168] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Cleaning up deleted instances {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 832.652203] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451707, 'name': ReconfigVM_Task, 'duration_secs': 0.898503} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.652203] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Reconfigured VM instance instance-00000041 to attach disk [datastore1] bc37e114-cf55-408b-9841-05eaf411b4f5/bc37e114-cf55-408b-9841-05eaf411b4f5.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.652203] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e78efac9-0361-4915-98cb-14161a99e2bd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.657926] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 832.657926] env[62383]: value = "task-2451709" [ 832.657926] env[62383]: _type = "Task" [ 832.657926] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.668190] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451709, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.799402] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451708, 'name': ReconfigVM_Task, 'duration_secs': 0.202827} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.800347] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 832.800500] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf05fd91-2aff-4252-bd58-1b8cdf6d7582 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.822854] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 9604eadf-a027-46dd-989b-0d4b752f883a/9604eadf-a027-46dd-989b-0d4b752f883a.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.823207] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd54e1e2-3abf-459e-89e7-ffc269185cfd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.842564] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 832.842564] env[62383]: value = "task-2451710" [ 832.842564] env[62383]: _type = "Task" [ 832.842564] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.852444] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451710, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.955269] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] There are 34 instances to clean {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 832.955555] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: bc1e1f0c-a86d-4d31-a8c4-45d362e9b807] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 833.156795] env[62383]: DEBUG nova.compute.manager [req-95cc3f22-e6b1-4cf7-9bbe-26bdd45b17f6 req-9eb42667-f7b4-47ec-9b40-dd2af975b364 service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Received event network-changed-df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 833.156966] env[62383]: DEBUG nova.compute.manager [req-95cc3f22-e6b1-4cf7-9bbe-26bdd45b17f6 req-9eb42667-f7b4-47ec-9b40-dd2af975b364 service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Refreshing instance network info cache due to event network-changed-df613873-27c1-4191-ad83-7321eb499e0b. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 833.157236] env[62383]: DEBUG oslo_concurrency.lockutils [req-95cc3f22-e6b1-4cf7-9bbe-26bdd45b17f6 req-9eb42667-f7b4-47ec-9b40-dd2af975b364 service nova] Acquiring lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.157466] env[62383]: DEBUG oslo_concurrency.lockutils [req-95cc3f22-e6b1-4cf7-9bbe-26bdd45b17f6 req-9eb42667-f7b4-47ec-9b40-dd2af975b364 service nova] Acquired lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.157670] env[62383]: DEBUG nova.network.neutron [req-95cc3f22-e6b1-4cf7-9bbe-26bdd45b17f6 req-9eb42667-f7b4-47ec-9b40-dd2af975b364 service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Refreshing network info cache for port df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.173338] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451709, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.228996] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "6fda89ec-aee1-4c1e-b005-51a9742abb19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.229324] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "6fda89ec-aee1-4c1e-b005-51a9742abb19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.229816] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "6fda89ec-aee1-4c1e-b005-51a9742abb19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.230057] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "6fda89ec-aee1-4c1e-b005-51a9742abb19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.230267] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "6fda89ec-aee1-4c1e-b005-51a9742abb19-events" "released" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.232454] env[62383]: INFO nova.compute.manager [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Terminating instance [ 833.352549] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451710, 'name': ReconfigVM_Task, 'duration_secs': 0.32979} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.355259] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 9604eadf-a027-46dd-989b-0d4b752f883a/9604eadf-a027-46dd-989b-0d4b752f883a.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.356891] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf9c919-08f7-4143-bf70-93dd9aaef73f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.377014] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8775ac5-f0c9-4a32-a09e-b75bde3cabfc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.397721] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6586e939-6bb8-40bd-81e0-1c1bcacbd1e3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.417422] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9cc727-8255-4e85-b685-73a3b97038f0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.423924] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 833.424179] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0060eef0-5f5c-4c44-9a39-882e76ed1c0e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.430294] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 833.430294] env[62383]: value = "task-2451711" [ 833.430294] env[62383]: _type = "Task" [ 833.430294] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.434464] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399534b7-47ce-4d8f-b5c8-a254f69a3154 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.440938] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451711, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.443499] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c7e052-910c-49fa-9d75-d255de916ddf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.477206] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 3f508af0-68a2-4898-b9ae-d84cdb8a4cd9] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 833.479270] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e62544c-714d-4e11-b5b9-72ed81760281 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.486938] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676e3996-205f-4ea5-9430-41a3b34f3169 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.500372] env[62383]: DEBUG nova.compute.provider_tree [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.675189] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451709, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.736156] env[62383]: DEBUG nova.compute.manager [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 833.736390] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 833.737454] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f8f9a8-ece1-4e82-9fc1-c6316201c644 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.745012] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 833.745403] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f60e573d-a8cc-4a35-89ff-628636e20c6a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.751134] env[62383]: DEBUG oslo_vmware.api [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 833.751134] env[62383]: value = "task-2451712" [ 833.751134] env[62383]: _type = "Task" [ 833.751134] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.759467] env[62383]: DEBUG oslo_vmware.api [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451712, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.883365] env[62383]: DEBUG nova.network.neutron [req-95cc3f22-e6b1-4cf7-9bbe-26bdd45b17f6 req-9eb42667-f7b4-47ec-9b40-dd2af975b364 service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Updated VIF entry in instance network info cache for port df613873-27c1-4191-ad83-7321eb499e0b. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 833.883365] env[62383]: DEBUG nova.network.neutron [req-95cc3f22-e6b1-4cf7-9bbe-26bdd45b17f6 req-9eb42667-f7b4-47ec-9b40-dd2af975b364 service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Updating instance_info_cache with network_info: [{"id": "df613873-27c1-4191-ad83-7321eb499e0b", "address": "fa:16:3e:8b:10:34", "network": {"id": "78c2d974-4a9e-4933-b7e4-e45bdfe89808", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1780687027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28e37dc42ac74824b43bd4b120a52674", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec46b14d-3310-4f2b-96c1-f53ee47d3759", "external-id": "nsx-vlan-transportzone-323", "segmentation_id": 323, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf613873-27", "ovs_interfaceid": "df613873-27c1-4191-ad83-7321eb499e0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.942417] env[62383]: DEBUG oslo_vmware.api [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451711, 'name': PowerOnVM_Task, 'duration_secs': 0.394919} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.942714] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 833.983383] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0f48434f-859f-4910-883f-2f81be647bad] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 834.003666] env[62383]: DEBUG nova.scheduler.client.report [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 834.171065] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451709, 'name': Rename_Task, 'duration_secs': 1.058085} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.171065] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 834.171065] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-affa28b4-2616-45a1-93ca-1b430e6d47ee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.176854] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 834.176854] env[62383]: value = "task-2451713" [ 834.176854] env[62383]: _type = "Task" [ 834.176854] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.184618] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451713, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.261154] env[62383]: DEBUG oslo_vmware.api [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451712, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.386734] env[62383]: DEBUG oslo_concurrency.lockutils [req-95cc3f22-e6b1-4cf7-9bbe-26bdd45b17f6 req-9eb42667-f7b4-47ec-9b40-dd2af975b364 service nova] Releasing lock "refresh_cache-6fda89ec-aee1-4c1e-b005-51a9742abb19" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.486069] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 563840a8-8fa7-4bfa-9912-933c14e7076a] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 834.509063] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.075s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.509581] env[62383]: DEBUG nova.compute.manager [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 834.514087] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.276s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.516962] env[62383]: INFO nova.compute.claims [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.687814] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451713, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.761390] env[62383]: DEBUG oslo_vmware.api [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451712, 'name': PowerOffVM_Task, 'duration_secs': 0.545079} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.761664] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 834.761834] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 834.762151] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74bbad97-87a8-46b8-9ac3-2c450f4bbab3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.825667] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 834.825924] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 834.826125] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Deleting the datastore file [datastore2] 6fda89ec-aee1-4c1e-b005-51a9742abb19 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 834.826398] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-747a0a5e-d1d8-4b41-988e-3ac72e843cd3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.833638] env[62383]: DEBUG oslo_vmware.api [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 834.833638] env[62383]: value = "task-2451715" [ 834.833638] env[62383]: _type = "Task" [ 834.833638] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.841578] env[62383]: DEBUG oslo_vmware.api [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451715, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.956458] env[62383]: INFO nova.compute.manager [None req-02e6a65d-b53c-4a74-8331-9493e074cfd3 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance to original state: 'active' [ 834.989769] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a10f5b03-c45b-4cc2-923f-3227665d236c] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 835.015475] env[62383]: DEBUG nova.compute.utils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 835.017738] env[62383]: DEBUG nova.compute.manager [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 835.017940] env[62383]: DEBUG nova.network.neutron [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 835.074065] env[62383]: DEBUG nova.policy [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52222401600845bcb88d02f000771658', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2b77864c75943b4a625276225c3aac9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 835.187757] env[62383]: DEBUG oslo_vmware.api [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451713, 'name': PowerOnVM_Task, 'duration_secs': 0.858178} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.188034] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 835.188232] env[62383]: INFO nova.compute.manager [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Took 8.58 seconds to spawn the instance on the hypervisor. 
[ 835.188434] env[62383]: DEBUG nova.compute.manager [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 835.189210] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca8de99-b987-46f9-8256-3083f9f46853 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.342053] env[62383]: DEBUG nova.network.neutron [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Successfully created port: 9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.351754] env[62383]: DEBUG oslo_vmware.api [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451715, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293966} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.352309] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.352609] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 835.352815] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 835.352815] env[62383]: INFO nova.compute.manager [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Took 1.62 seconds to destroy the instance on the hypervisor. [ 835.352815] env[62383]: DEBUG oslo.service.loopingcall [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.353601] env[62383]: DEBUG nova.compute.manager [-] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 835.353689] env[62383]: DEBUG nova.network.neutron [-] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 835.493289] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 583138d1-f928-4e33-a443-11c627203c44] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 835.521116] env[62383]: DEBUG nova.compute.manager [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 835.709145] env[62383]: INFO nova.compute.manager [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Took 42.97 seconds to build instance. [ 835.942775] env[62383]: DEBUG nova.compute.manager [req-0b4a6a02-b2a1-4f4b-9d8b-2c7535a9554a req-453225db-5b81-4270-8bb6-8b1ef00de848 service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Received event network-vif-deleted-df613873-27c1-4191-ad83-7321eb499e0b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 835.942968] env[62383]: INFO nova.compute.manager [req-0b4a6a02-b2a1-4f4b-9d8b-2c7535a9554a req-453225db-5b81-4270-8bb6-8b1ef00de848 service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Neutron deleted interface df613873-27c1-4191-ad83-7321eb499e0b; detaching it from the instance and deleting it from the info cache [ 835.943159] env[62383]: DEBUG nova.network.neutron [req-0b4a6a02-b2a1-4f4b-9d8b-2c7535a9554a req-453225db-5b81-4270-8bb6-8b1ef00de848 service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.996859] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0c01a974-2318-461b-965f-ba4932e3bea1] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 836.066537] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1534ee-70ed-46e4-be4b-7c8d27b68572 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.075035] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1f3bf9-ff04-4cf8-bc4d-98c3d22fd570 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.108400] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f50ee79-879b-4c2c-a33a-0a502dda7795 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.116239] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7fa1c3-7fd2-462e-9646-c2de2cedb836 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.131728] env[62383]: DEBUG nova.compute.provider_tree [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.212176] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c8a3e992-f539-4840-8d40-c3dbc8dfdac3 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "bc37e114-cf55-408b-9841-05eaf411b4f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.494s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.270854] env[62383]: DEBUG nova.network.neutron [-] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.447075] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61206479-cf28-4df1-bcd0-9a9262925dd3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.459941] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4722caaf-04af-48fc-98d3-f84e4c58fa51 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.497923] env[62383]: DEBUG nova.compute.manager [req-0b4a6a02-b2a1-4f4b-9d8b-2c7535a9554a req-453225db-5b81-4270-8bb6-8b1ef00de848 service nova] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Detach interface failed, port_id=df613873-27c1-4191-ad83-7321eb499e0b, reason: Instance 6fda89ec-aee1-4c1e-b005-51a9742abb19 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 836.502884] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a16193af-410e-4bf6-bb06-a97791cf6060] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 836.531418] env[62383]: DEBUG nova.compute.manager [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 836.623645] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 836.623907] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.624085] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 836.624560] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.624560] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 836.624560] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 836.624770] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 836.624931] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 836.625121] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 836.625291] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 836.625466] env[62383]: DEBUG nova.virt.hardware [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 836.626525] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382bcdba-2cde-4c01-8313-19cf221ee2a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.635214] env[62383]: DEBUG nova.scheduler.client.report [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 836.639848] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78bd0af0-8b92-45da-be5d-efb721be3899 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.704109] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.704384] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.774432] env[62383]: INFO nova.compute.manager [-] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Took 1.42 seconds to deallocate network for instance. 
[ 837.007947] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 17498cb6-8b16-4a2e-96ae-c594966cee77] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 837.010097] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "9604eadf-a027-46dd-989b-0d4b752f883a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.011139] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "9604eadf-a027-46dd-989b-0d4b752f883a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.011736] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "9604eadf-a027-46dd-989b-0d4b752f883a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.012512] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "9604eadf-a027-46dd-989b-0d4b752f883a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.012823] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "9604eadf-a027-46dd-989b-0d4b752f883a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.014937] env[62383]: INFO nova.compute.manager [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Terminating instance [ 837.148021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.148021] env[62383]: DEBUG nova.compute.manager [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Start building networks 
asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 837.148545] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.190s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.148955] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.151691] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.478s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.152000] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.154048] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.113s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.154362] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.155998] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.340s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.157656] env[62383]: INFO nova.compute.claims [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.187602] env[62383]: INFO nova.scheduler.client.report [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 
tempest-VolumesAdminNegativeTest-1772884078-project-member] Deleted allocations for instance 282f2c94-7a63-4eef-aa80-7d67d0a0972a [ 837.194817] env[62383]: INFO nova.scheduler.client.report [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Deleted allocations for instance a27fcace-4fb3-48fb-946d-b8057f6ee601 [ 837.204512] env[62383]: INFO nova.scheduler.client.report [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleted allocations for instance 2337e9a2-736c-4d58-ac2e-04c8ad813be4 [ 837.208058] env[62383]: DEBUG nova.compute.manager [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 837.232518] env[62383]: DEBUG nova.network.neutron [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Successfully updated port: 9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.280422] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.511147] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: f4d47cd8-1607-4e05-9e6a-3a1faf23ddbf] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 837.518921] env[62383]: DEBUG nova.compute.manager [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 837.519142] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 837.520108] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7839004-2749-456b-a961-2e07acb50def {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.533028] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 837.533174] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7c9a224-63a0-4848-97a5-05ab2d5ecffd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.539197] env[62383]: DEBUG oslo_vmware.api [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 837.539197] env[62383]: value = "task-2451717" [ 837.539197] env[62383]: _type = "Task" [ 837.539197] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.548927] env[62383]: DEBUG oslo_vmware.api [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451717, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.666043] env[62383]: DEBUG nova.compute.utils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 837.669446] env[62383]: DEBUG nova.compute.manager [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 837.669636] env[62383]: DEBUG nova.network.neutron [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 837.709791] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24cbf7d9-fb83-4be8-884e-1df792e527d6 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "282f2c94-7a63-4eef-aa80-7d67d0a0972a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.595s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.717135] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2e98981-4b25-4060-a7e8-d38104b5e4ac tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "a27fcace-4fb3-48fb-946d-b8057f6ee601" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.215s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.723447] env[62383]: DEBUG nova.policy [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb2a655195344ae08df15c3f21a17690', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f973903bfbec4d00b8988852450c7794', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 837.734582] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "refresh_cache-2eba2920-7912-475b-a198-890743aa5255" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.734582] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired lock "refresh_cache-2eba2920-7912-475b-a198-890743aa5255" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.734582] env[62383]: DEBUG nova.network.neutron [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.737035] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7a339afd-9c55-41bc-bbda-152672075ebc tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock 
"2337e9a2-736c-4d58-ac2e-04c8ad813be4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.220s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.738967] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.970291] env[62383]: DEBUG nova.compute.manager [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Received event network-vif-plugged-9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 837.970540] env[62383]: DEBUG oslo_concurrency.lockutils [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] Acquiring lock "2eba2920-7912-475b-a198-890743aa5255-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.971080] env[62383]: DEBUG oslo_concurrency.lockutils [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] Lock "2eba2920-7912-475b-a198-890743aa5255-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.971080] env[62383]: DEBUG oslo_concurrency.lockutils [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] Lock "2eba2920-7912-475b-a198-890743aa5255-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.971595] env[62383]: DEBUG nova.compute.manager [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] No waiting events found dispatching network-vif-plugged-9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 837.971595] env[62383]: WARNING nova.compute.manager [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Received unexpected event network-vif-plugged-9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b for instance with vm_state building and task_state spawning. 
[ 837.971791] env[62383]: DEBUG nova.compute.manager [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Received event network-changed-9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 837.972016] env[62383]: DEBUG nova.compute.manager [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Refreshing instance network info cache due to event network-changed-9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 837.972222] env[62383]: DEBUG oslo_concurrency.lockutils [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] Acquiring lock "refresh_cache-2eba2920-7912-475b-a198-890743aa5255" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.013313] env[62383]: DEBUG nova.network.neutron [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Successfully created port: ea141edc-2c5f-4ccb-9af1-fe4caec1c754 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.015425] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 9c2c55a9-5b24-4d52-8d6b-666609349a3a] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 838.049652] env[62383]: DEBUG oslo_vmware.api [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451717, 'name': PowerOffVM_Task, 'duration_secs': 0.259479} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.049921] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 838.050106] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 838.050353] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa45d6dc-8597-4bb1-b7eb-d19d484587e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.075302] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "da16da02-25ab-46f9-9070-9fdde0b3a75e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.075586] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "da16da02-25ab-46f9-9070-9fdde0b3a75e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.075798] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "da16da02-25ab-46f9-9070-9fdde0b3a75e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.075982] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "da16da02-25ab-46f9-9070-9fdde0b3a75e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.076176] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "da16da02-25ab-46f9-9070-9fdde0b3a75e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.078356] env[62383]: INFO nova.compute.manager [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] 
[instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Terminating instance [ 838.113715] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 838.114126] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 838.114336] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Deleting the datastore file [datastore2] 9604eadf-a027-46dd-989b-0d4b752f883a {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 838.114609] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7846e8dc-f76c-4e79-9970-f2b005ed2753 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.120878] env[62383]: DEBUG oslo_vmware.api [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 838.120878] env[62383]: value = "task-2451719" [ 838.120878] env[62383]: _type = "Task" [ 838.120878] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.129197] env[62383]: DEBUG oslo_vmware.api [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451719, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.176530] env[62383]: DEBUG nova.compute.manager [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 838.277068] env[62383]: DEBUG nova.network.neutron [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.377186] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.377508] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.377726] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.377948] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.378168] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.380310] env[62383]: INFO nova.compute.manager [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Terminating instance [ 838.430341] env[62383]: DEBUG nova.network.neutron [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Updating instance_info_cache with network_info: [{"id": "9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b", "address": "fa:16:3e:c1:f5:06", "network": {"id": "3ae77c57-8c24-4aba-943f-e5e0c2471a86", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-404472524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"f2b77864c75943b4a625276225c3aac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9117f7a4-bb", "ovs_interfaceid": "9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.518518] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 872ac212-9f29-426d-94c7-e1bf73aebd94] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 838.576779] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4528024-5eba-48a2-a770-6ddc5d61434d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.584546] env[62383]: DEBUG nova.compute.manager [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 838.584771] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.585572] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13b446a-ec73-4cae-8be9-d9d4886cd40b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.588773] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a90be1c-7e22-4f9c-92ab-663e4570948b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.595727] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.620621] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1837f0da-bf9b-4fbc-a141-15b1b0a92f10 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.623277] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669c5b4b-dd37-49a8-9980-e893b14d5ff1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.633603] env[62383]: DEBUG oslo_vmware.api [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 
tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451719, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146073} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.636813] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 838.636813] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 838.636915] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 838.637108] env[62383]: INFO nova.compute.manager [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 838.637360] env[62383]: DEBUG oslo.service.loopingcall [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.637978] env[62383]: DEBUG oslo_vmware.api [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 838.637978] env[62383]: value = "task-2451720" [ 838.637978] env[62383]: _type = "Task" [ 838.637978] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.637978] env[62383]: DEBUG nova.compute.manager [-] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 838.638267] env[62383]: DEBUG nova.network.neutron [-] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 838.640555] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec9b11e-745a-450c-97f3-c61b3418b913 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.658185] env[62383]: DEBUG nova.compute.provider_tree [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.662258] env[62383]: DEBUG oslo_vmware.api [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.889826] env[62383]: DEBUG nova.compute.manager [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 838.889826] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.889826] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe86b9ee-1faf-4df1-98ea-7f4b5e3e71de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.898367] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.898659] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4e180ea-b98f-48f2-afee-41bf19fa3337 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.905365] env[62383]: DEBUG oslo_vmware.api [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 838.905365] env[62383]: value = "task-2451721" [ 838.905365] env[62383]: _type = "Task" [ 838.905365] env[62383]: } to complete. 
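Editor's note: the '(returnval){ value = "task-2451720" ... } to complete' block and the '_poll_task ... progress is 0%' line above are the oslo.vmware task-wait pattern used for every vCenter operation in this log. A hedged sketch of that call path follows; the vCenter host, credentials and vm_ref are assumed placeholders, and this is not the driver's actual code.

    from oslo_vmware import api as vmware_api

    def make_session(host, user, password):
        # Opens a SOAP session to vCenter; task_poll_interval controls how
        # often wait_for_task polls (the repeated "progress is 0%" lines).
        return vmware_api.VMwareAPISession(host, user, password,
                                           api_retry_count=10,
                                           task_poll_interval=0.5)

    def power_off(session, vm_ref):
        # invoke_api sends PowerOffVM_Task and returns a Task managed object
        # reference; wait_for_task blocks until vCenter reports success/error.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)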
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.913172] env[62383]: DEBUG oslo_vmware.api [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451721, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.933258] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Releasing lock "refresh_cache-2eba2920-7912-475b-a198-890743aa5255" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.933583] env[62383]: DEBUG nova.compute.manager [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Instance network_info: |[{"id": "9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b", "address": "fa:16:3e:c1:f5:06", "network": {"id": "3ae77c57-8c24-4aba-943f-e5e0c2471a86", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-404472524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b77864c75943b4a625276225c3aac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9117f7a4-bb", "ovs_interfaceid": "9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 838.933866] env[62383]: DEBUG oslo_concurrency.lockutils [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] Acquired lock "refresh_cache-2eba2920-7912-475b-a198-890743aa5255" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.934058] env[62383]: DEBUG nova.network.neutron [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Refreshing network info cache for port 9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.938929] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:f5:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.942757] env[62383]: DEBUG oslo.service.loopingcall [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.945691] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2eba2920-7912-475b-a198-890743aa5255] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.946170] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54bed1c6-bd54-4543-9221-42fae5ad7998 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.965995] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.965995] env[62383]: value = "task-2451722" [ 838.965995] env[62383]: _type = "Task" [ 838.965995] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.974886] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451722, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.024397] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 6b5daa17-ad4a-4b30-a1fe-083a1a238667] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 839.156409] env[62383]: DEBUG oslo_vmware.api [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451720, 'name': PowerOffVM_Task, 'duration_secs': 0.226631} completed successfully. 
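Editor's note: the 'Instance VIF info [...]' entry above shows the neutron network_info blob from earlier in this section flattened into the dict the vmwareapi driver uses to wire the NIC (an NSX OpaqueNetwork plus a vmxnet3 model). A purely illustrative mapping, not Nova's implementation, could look like this; the field choices mirror the two log entries and nothing else.

    def vif_info_from_network_info(vif):
        # `vif` is one element of the network_info list dumped in the log.
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],      # 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details.get('nsx-logical-switch-id'),
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',
        }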
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.157532] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.157532] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.157532] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c7eac20-4653-4846-aef7-133a3af7554e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.164018] env[62383]: DEBUG nova.scheduler.client.report [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 839.186805] env[62383]: DEBUG nova.compute.manager [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 839.189978] env[62383]: DEBUG nova.network.neutron [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Updated VIF entry in instance network info cache for port 9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.190468] env[62383]: DEBUG nova.network.neutron [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Updating instance_info_cache with network_info: [{"id": "9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b", "address": "fa:16:3e:c1:f5:06", "network": {"id": "3ae77c57-8c24-4aba-943f-e5e0c2471a86", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-404472524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b77864c75943b4a625276225c3aac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9117f7a4-bb", "ovs_interfaceid": "9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.217558] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 839.217808] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 839.217971] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 839.218174] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 
tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 839.218325] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 839.218474] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 839.218683] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 839.218878] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 839.219073] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 839.219267] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 839.219449] env[62383]: DEBUG nova.virt.hardware [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 839.220599] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1537ed05-eb61-4713-969c-c55ef278a9a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.229764] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19330eb6-c87a-46ea-9df3-7afc879f7747 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.284397] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 
da16da02-25ab-46f9-9070-9fdde0b3a75e] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.284795] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.285068] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleting the datastore file [datastore2] da16da02-25ab-46f9-9070-9fdde0b3a75e {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.285340] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f7631bb-374f-4f1e-9218-526a96a25130 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.292584] env[62383]: DEBUG oslo_vmware.api [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 839.292584] env[62383]: value = "task-2451724" [ 839.292584] env[62383]: _type = "Task" [ 839.292584] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.300629] env[62383]: DEBUG oslo_vmware.api [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451724, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.394743] env[62383]: DEBUG nova.network.neutron [-] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.414859] env[62383]: DEBUG oslo_vmware.api [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451721, 'name': PowerOffVM_Task, 'duration_secs': 0.328288} completed successfully. 
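Editor's note: the vmops/ds_util entries above ('Unregistered the VM', 'Deleting the datastore file [datastore2] ...', 'Invoking FileManager.DeleteDatastoreFile_Task') trace the teardown that follows power-off. A hedged sketch of those two vSphere calls via oslo.vmware follows; session, vm_ref, dc_ref and the datastore path are assumed inputs, not the driver's exact code.

    def destroy_after_power_off(session, vm_ref, dc_ref,
                                ds_dir='[datastore2] da16da02-25ab-46f9-9070-9fdde0b3a75e'):
        # Remove the VM from the vCenter inventory (plain call, no task).
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        # Delete the instance directory; this is the DeleteDatastoreFile_Task
        # whose completion is polled in the surrounding log lines.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_dir, datacenter=dc_ref)
        session.wait_for_task(task)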
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.415501] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.429895] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.429895] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54f782b4-4da9-4f16-9656-e2db17ed7506 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.476554] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451722, 'name': CreateVM_Task, 'duration_secs': 0.381133} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.476761] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2eba2920-7912-475b-a198-890743aa5255] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.477563] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.477771] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.478184] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 839.478487] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e5d86fb-133c-4cb3-a7ac-e4df8ef1ec38 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.483347] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 839.483347] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527e7391-6db4-f2f4-1cd0-83b628db800c" [ 839.483347] env[62383]: _type = "Task" [ 839.483347] env[62383]: } to complete. 
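Editor's note: the lockutils entries above ('Acquiring lock "[datastore1] devstack-image-cache_base/<image-id>"', 'Acquired external semaphore ...') serialize work on one cached image per datastore. A sketch of that idiom with oslo.concurrency is below; the lock_path and the work done under the lock are assumptions for the example, not values taken from this log.

    from oslo_concurrency import lockutils

    def with_image_cache_lock(image_id, datastore='datastore1'):
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        # external=True adds an inter-process file lock on top of the
        # in-process semaphore; lock_path is where the lock file lives.
        with lockutils.lock(lock_name, external=True, lock_path='/tmp'):
            # Only one caller at a time gets here for a given image, so a
            # cache check/fetch/copy inside this block cannot race another boot.
            print('holding', lock_name)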
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.487983] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.488220] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.488463] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Deleting the datastore file [datastore2] ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.489187] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e9fd545-52cb-4a32-829b-099750ff1de7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.494603] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527e7391-6db4-f2f4-1cd0-83b628db800c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.498590] env[62383]: DEBUG oslo_vmware.api [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for the task: (returnval){ [ 839.498590] env[62383]: value = "task-2451726" [ 839.498590] env[62383]: _type = "Task" [ 839.498590] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.506146] env[62383]: DEBUG oslo_vmware.api [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451726, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.529886] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 330b5e35-3292-4df7-b288-547b158e671a] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 839.577671] env[62383]: DEBUG nova.network.neutron [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Successfully updated port: ea141edc-2c5f-4ccb-9af1-fe4caec1c754 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.669854] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.670441] env[62383]: DEBUG nova.compute.manager [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 839.673027] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 20.536s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.692983] env[62383]: DEBUG oslo_concurrency.lockutils [req-2f8d74d8-28db-4b68-b9cd-fdeb6754b1a8 req-b38bb81a-2132-4ece-bcad-e0077c0a9144 service nova] Releasing lock "refresh_cache-2eba2920-7912-475b-a198-890743aa5255" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.806122] env[62383]: DEBUG oslo_vmware.api [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451724, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137722} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.806453] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 839.806714] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 839.806915] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 839.807115] env[62383]: INFO nova.compute.manager [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Took 1.22 seconds to destroy the instance on the hypervisor. [ 839.807353] env[62383]: DEBUG oslo.service.loopingcall [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 839.807550] env[62383]: DEBUG nova.compute.manager [-] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 839.807648] env[62383]: DEBUG nova.network.neutron [-] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 839.897806] env[62383]: INFO nova.compute.manager [-] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Took 1.26 seconds to deallocate network for instance. [ 839.994257] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527e7391-6db4-f2f4-1cd0-83b628db800c, 'name': SearchDatastore_Task, 'duration_secs': 0.011848} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.994576] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.994878] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.995164] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.996049] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.996049] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.996049] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d64125db-6cf8-4612-b095-6a699dfd0421 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.005228] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.005418] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Folder [datastore1] devstack-image-cache_base created. 
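Editor's note: the 'Creating directory with path [datastore1] devstack-image-cache_base' / 'Folder ... created' pair above comes from a create-if-missing call to FileManager.MakeDirectory. A hedged sketch follows; session and dc_ref are assumed inputs and this is not the driver's exact code.

    from oslo_vmware import exceptions as vexc

    def mkdir_if_missing(session, dc_ref,
                         ds_path='[datastore1] devstack-image-cache_base'):
        file_manager = session.vim.service_content.fileManager
        try:
            session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                               name=ds_path, datacenter=dc_ref,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            # Another request created the cache folder first; treat as success.
            pass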
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 840.006494] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebaa0256-d963-47d3-a2aa-ae44d8731433 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.011981] env[62383]: DEBUG oslo_vmware.api [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Task: {'id': task-2451726, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185574} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.012238] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 840.012543] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 840.012627] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 840.013359] env[62383]: INFO nova.compute.manager [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 840.013359] env[62383]: DEBUG oslo.service.loopingcall [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.013359] env[62383]: DEBUG nova.compute.manager [-] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 840.013359] env[62383]: DEBUG nova.network.neutron [-] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 840.016049] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 840.016049] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c5ad51-89d2-3764-3c13-57f851bd5f7a" [ 840.016049] env[62383]: _type = "Task" [ 840.016049] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.027022] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c5ad51-89d2-3764-3c13-57f851bd5f7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.032466] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 045e5f8f-edd5-425d-bccb-054d90db27d9] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 840.052143] env[62383]: DEBUG nova.compute.manager [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Received event network-vif-deleted-796c3e3e-48f2-4d7f-8f7d-974f792c4426 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 840.052368] env[62383]: DEBUG nova.compute.manager [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Received event network-vif-plugged-ea141edc-2c5f-4ccb-9af1-fe4caec1c754 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 840.052560] env[62383]: DEBUG oslo_concurrency.lockutils [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] Acquiring lock "a04a6a53-cca8-4e15-b840-cb1394e5b188-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.052764] env[62383]: DEBUG oslo_concurrency.lockutils [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] Lock "a04a6a53-cca8-4e15-b840-cb1394e5b188-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.052936] env[62383]: DEBUG oslo_concurrency.lockutils [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] Lock "a04a6a53-cca8-4e15-b840-cb1394e5b188-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.053455] env[62383]: DEBUG nova.compute.manager [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] No waiting events found dispatching network-vif-plugged-ea141edc-2c5f-4ccb-9af1-fe4caec1c754 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 840.053672] env[62383]: WARNING nova.compute.manager [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Received unexpected event network-vif-plugged-ea141edc-2c5f-4ccb-9af1-fe4caec1c754 for instance with vm_state building and task_state spawning. 
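Editor's note: the 'Received event network-vif-plugged-...' / 'No waiting events found dispatching ...' entries above are Nova consuming external events that Neutron posts to the os-server-external-events API. A hedged sketch of the sender side follows; the endpoint URL, token and UUID handling are placeholders, not values from this deployment.

    import json
    import urllib.request

    def send_vif_plugged(nova_url, token, server_uuid, port_id):
        body = json.dumps({'events': [{
            'name': 'network-vif-plugged',   # or network-changed / network-vif-deleted
            'server_uuid': server_uuid,
            'tag': port_id,                  # the port whose binding went active
            'status': 'completed',
        }]}).encode()
        req = urllib.request.Request(
            nova_url + '/os-server-external-events', data=body,
            headers={'Content-Type': 'application/json',
                     'X-Auth-Token': token})
        return urllib.request.urlopen(req)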
[ 840.053858] env[62383]: DEBUG nova.compute.manager [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Received event network-changed-ea141edc-2c5f-4ccb-9af1-fe4caec1c754 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 840.053991] env[62383]: DEBUG nova.compute.manager [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Refreshing instance network info cache due to event network-changed-ea141edc-2c5f-4ccb-9af1-fe4caec1c754. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 840.055198] env[62383]: DEBUG oslo_concurrency.lockutils [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] Acquiring lock "refresh_cache-a04a6a53-cca8-4e15-b840-cb1394e5b188" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.055198] env[62383]: DEBUG oslo_concurrency.lockutils [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] Acquired lock "refresh_cache-a04a6a53-cca8-4e15-b840-cb1394e5b188" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.055198] env[62383]: DEBUG nova.network.neutron [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Refreshing network info cache for port ea141edc-2c5f-4ccb-9af1-fe4caec1c754 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.080036] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquiring lock "refresh_cache-a04a6a53-cca8-4e15-b840-cb1394e5b188" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.176389] env[62383]: DEBUG nova.compute.utils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 840.180695] env[62383]: DEBUG nova.compute.manager [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 840.180907] env[62383]: DEBUG nova.network.neutron [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 840.236093] env[62383]: DEBUG nova.policy [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6324b3a4f5a24752b0bef1b5d79ea2ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fabc88f824a44c57b19a07a605fb89fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 840.405340] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.529565] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c5ad51-89d2-3764-3c13-57f851bd5f7a, 'name': SearchDatastore_Task, 'duration_secs': 0.013038} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.529970] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14f8c013-4805-40a8-a6a4-58a3765dc7f2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.538187] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 60535a30-4602-4063-94a4-30ed01266d5b] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 840.540379] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 840.540379] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b4efd6-52b6-fc67-6fb4-ce7d9a5a7f17" [ 840.540379] env[62383]: _type = "Task" [ 840.540379] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.549621] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b4efd6-52b6-fc67-6fb4-ce7d9a5a7f17, 'name': SearchDatastore_Task} progress is 0%. 
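Editor's note: the 'Policy check for network:attach_external_network failed with credentials {...}' entry above is an oslo.policy decision against the caller's roles. The sketch below reproduces only the shape of such a check; the 'role:admin' rule string and the credential values are illustrative, not Nova's registered default.

    from oslo_config import cfg
    from oslo_policy import policy

    cfg.CONF([], project='example')          # parse an empty config; defaults apply
    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'], 'project_id': 'project-x',
             'is_admin': False}
    target = {'project_id': creds['project_id']}
    # do_raise defaults to False, so a failed check returns False (as logged)
    # instead of raising PolicyNotAuthorized.
    print(enforcer.enforce('network:attach_external_network', target, creds))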
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.601145] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd135f60-30b9-42c5-8d15-ba4ac04ce404 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.603755] env[62383]: DEBUG nova.network.neutron [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 840.610217] env[62383]: DEBUG nova.network.neutron [-] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.612294] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b73060-967d-47ba-9de3-967469338613 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.648967] env[62383]: DEBUG nova.network.neutron [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Successfully created port: 2ff20743-555a-49bd-964f-be249744a686 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 840.652708] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98787297-5952-413b-8a6f-051953bb10f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.665959] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f8c0b6-508d-46c4-87ee-145bdd6869bc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.680940] env[62383]: DEBUG nova.compute.provider_tree [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 840.682551] env[62383]: DEBUG nova.compute.manager [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 840.732782] env[62383]: DEBUG nova.network.neutron [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.919232] env[62383]: DEBUG nova.network.neutron [-] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.045431] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: e51a0dd7-b5da-44cb-9cd8-62932aec3ad5] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 841.053668] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b4efd6-52b6-fc67-6fb4-ce7d9a5a7f17, 'name': SearchDatastore_Task, 'duration_secs': 0.009951} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.053934] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.054205] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 2eba2920-7912-475b-a198-890743aa5255/2eba2920-7912-475b-a198-890743aa5255.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 841.054659] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2fdd1a83-ed18-49ea-93ff-d23143f68e7d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.064032] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 841.064032] env[62383]: value = "task-2451727" [ 841.064032] env[62383]: _type = "Task" [ 841.064032] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.072852] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451727, 'name': CopyVirtualDisk_Task} progress is 0%. 
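Editor's note: the 'Copying Virtual Disk [datastore1] devstack-image-cache_base/... to [datastore1] 2eba2920-....vmdk' entry above, together with the 'Extending root virtual disk to 1048576' step a few entries further on, is the cached-image-to-root-disk path. A hedged sketch via oslo.vmware follows; session and dc_ref are assumed, and 1048576 is a capacity in KB (a 1 GiB root disk, matching the m1.nano flavor's root_gb=1).

    def copy_and_extend_root_disk(session, dc_ref, src_vmdk, dest_vmdk,
                                  new_capacity_kb=1048576):
        vdm = session.vim.service_content.virtualDiskManager
        # Copy the cached image VMDK into the instance folder.
        copy = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  sourceName=src_vmdk, sourceDatacenter=dc_ref,
                                  destName=dest_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(copy)
        # Grow it to the flavor's root_gb (1 GiB here = 1048576 KB).
        extend = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', vdm,
                                    name=dest_vmdk, datacenter=dc_ref,
                                    newCapacityKb=new_capacity_kb,
                                    eagerZero=False)
        session.wait_for_task(extend)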
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.118310] env[62383]: INFO nova.compute.manager [-] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Took 1.31 seconds to deallocate network for instance. [ 841.161166] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "80821717-f961-49c7-8b79-c152edfdfb94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.161467] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.161618] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "80821717-f961-49c7-8b79-c152edfdfb94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.161728] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.162695] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.165167] env[62383]: INFO nova.compute.manager [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Terminating instance [ 841.206737] env[62383]: ERROR nova.scheduler.client.report [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [req-afd14e4e-d747-459b-b858-fd747de0da19] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 
60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-afd14e4e-d747-459b-b858-fd747de0da19"}]} [ 841.225105] env[62383]: DEBUG nova.scheduler.client.report [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 841.236253] env[62383]: DEBUG oslo_concurrency.lockutils [req-b6a2f5d0-6ae9-4c97-b64c-3cd6c6e3099b req-fe6e1163-680f-442f-8202-2a2ef40b6e3a service nova] Releasing lock "refresh_cache-a04a6a53-cca8-4e15-b840-cb1394e5b188" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.237116] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquired lock "refresh_cache-a04a6a53-cca8-4e15-b840-cb1394e5b188" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.237300] env[62383]: DEBUG nova.network.neutron [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.244622] env[62383]: DEBUG nova.scheduler.client.report [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 841.244913] env[62383]: DEBUG nova.compute.provider_tree [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 841.261675] env[62383]: DEBUG nova.scheduler.client.report [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] 
Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 841.282389] env[62383]: DEBUG nova.scheduler.client.report [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 841.422212] env[62383]: INFO nova.compute.manager [-] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Took 1.41 seconds to deallocate network for instance. [ 841.548511] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 13db2c17-ccba-4336-929a-0d01202c5143] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 841.575661] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451727, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448686} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.575856] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 2eba2920-7912-475b-a198-890743aa5255/2eba2920-7912-475b-a198-890743aa5255.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 841.576078] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 841.576325] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3609bf52-350a-4be8-823e-2be20d1c9568 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.582958] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 841.582958] env[62383]: value = "task-2451728" [ 841.582958] env[62383]: _type = "Task" [ 841.582958] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.592957] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451728, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.628030] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.670313] env[62383]: DEBUG nova.compute.manager [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 841.670542] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 841.671442] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b4c346-fb42-4ee6-84ab-6b307e1237c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.679145] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 841.679693] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91af7f56-9507-4ff9-8509-3bdfc4f586d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.687457] env[62383]: DEBUG oslo_vmware.api [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 841.687457] env[62383]: value = "task-2451729" [ 841.687457] env[62383]: _type = "Task" [ 841.687457] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.696283] env[62383]: DEBUG nova.compute.manager [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 841.698463] env[62383]: DEBUG oslo_vmware.api [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451729, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.722098] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 841.722376] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.722538] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 841.722748] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.722946] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 841.723151] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 841.723418] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 841.723624] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 
tempest-ImagesTestJSON-668991885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 841.723838] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 841.724062] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 841.724279] env[62383]: DEBUG nova.virt.hardware [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 841.725216] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b62ed36-3756-4ceb-b580-162df0eb7764 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.729796] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3782a2-8f5b-4e14-baaa-61ce9884d371 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.738660] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1af0ef-9849-4b85-8c1e-1aeee77ad799 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.744919] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52ae953-c6ec-4d7a-afe0-57bef21cfead {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.784702] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840e8b21-d3e1-4f7f-84fe-14e30d39c9dd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.792796] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f15cdb-184e-44e5-8a1b-2617b77b0e16 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.797769] env[62383]: DEBUG nova.network.neutron [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.811529] env[62383]: DEBUG nova.compute.provider_tree [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 841.932501] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.981789] env[62383]: DEBUG nova.network.neutron [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Updating instance_info_cache with network_info: [{"id": "ea141edc-2c5f-4ccb-9af1-fe4caec1c754", "address": "fa:16:3e:29:2a:8f", "network": {"id": "82664c4e-44ff-44b4-a60f-2bae8b895e37", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1241176215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f973903bfbec4d00b8988852450c7794", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea141edc-2c", "ovs_interfaceid": "ea141edc-2c5f-4ccb-9af1-fe4caec1c754", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.054326] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: f28beb17-8455-49d3-8be0-7636b9abe4e8] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 842.095197] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451728, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063818} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.095474] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.096296] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ad63c0-7226-49cb-b772-0b03d1830616 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.122816] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 2eba2920-7912-475b-a198-890743aa5255/2eba2920-7912-475b-a198-890743aa5255.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.123149] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1ad3a25-2b0f-4928-9322-9aead3cc478c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.144543] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 842.144543] env[62383]: value = "task-2451730" [ 842.144543] env[62383]: _type = "Task" [ 842.144543] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.154195] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451730, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.175275] env[62383]: DEBUG nova.compute.manager [req-603c542b-a8a9-4668-a3be-0b643f4096ad req-a8341d08-aadd-4508-848f-f75c82f330c1 service nova] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Received event network-vif-deleted-2235952c-ebdd-41c6-9aa0-6353365f5ddf {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 842.175275] env[62383]: DEBUG nova.compute.manager [req-603c542b-a8a9-4668-a3be-0b643f4096ad req-a8341d08-aadd-4508-848f-f75c82f330c1 service nova] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Received event network-vif-deleted-241eb943-d5b6-4224-b2fb-c12596e3b206 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 842.198014] env[62383]: DEBUG oslo_vmware.api [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451729, 'name': PowerOffVM_Task, 'duration_secs': 0.215037} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.198302] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 842.199078] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 842.199078] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82d21225-12e7-4a43-b6e3-fe19afaf0404 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.204599] env[62383]: DEBUG nova.network.neutron [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Successfully updated port: 2ff20743-555a-49bd-964f-be249744a686 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 842.282046] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 842.282046] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 842.282046] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Deleting the datastore file [datastore1] 80821717-f961-49c7-8b79-c152edfdfb94 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 842.282046] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1bd4956-4388-4973-8a22-4bf539616d7a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.290698] env[62383]: DEBUG oslo_vmware.api [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for the task: (returnval){ [ 842.290698] env[62383]: value = "task-2451732" [ 842.290698] env[62383]: _type = "Task" [ 842.290698] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.299255] env[62383]: DEBUG oslo_vmware.api [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451732, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.346799] env[62383]: DEBUG nova.scheduler.client.report [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 842.347125] env[62383]: DEBUG nova.compute.provider_tree [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 95 to 96 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 842.347317] env[62383]: DEBUG nova.compute.provider_tree [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 842.485881] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Releasing lock "refresh_cache-a04a6a53-cca8-4e15-b840-cb1394e5b188" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.486270] env[62383]: DEBUG nova.compute.manager [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Instance network_info: |[{"id": "ea141edc-2c5f-4ccb-9af1-fe4caec1c754", "address": "fa:16:3e:29:2a:8f", "network": {"id": "82664c4e-44ff-44b4-a60f-2bae8b895e37", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1241176215-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f973903bfbec4d00b8988852450c7794", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea141edc-2c", "ovs_interfaceid": "ea141edc-2c5f-4ccb-9af1-fe4caec1c754", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 842.486694] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:2a:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea141edc-2c5f-4ccb-9af1-fe4caec1c754', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.495109] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Creating folder: Project (f973903bfbec4d00b8988852450c7794). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 842.495386] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-226161e0-d13a-4a3b-9c28-b3791b89fc5f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.507964] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Created folder: Project (f973903bfbec4d00b8988852450c7794) in parent group-v496304. [ 842.508188] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Creating folder: Instances. Parent ref: group-v496491. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 842.508434] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-804c0689-53a5-4fd9-b2bc-fa11cbc5c10d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.518319] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Created folder: Instances in parent group-v496491. 
[ 842.518551] env[62383]: DEBUG oslo.service.loopingcall [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 842.518766] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.518992] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7ef166f-6f6d-470c-833b-097c7b7a9128 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.540707] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.540707] env[62383]: value = "task-2451735" [ 842.540707] env[62383]: _type = "Task" [ 842.540707] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.548944] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451735, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.557590] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0dd47ff3-5a5b-4c51-8e6a-fc11449f21be] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 842.657198] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.706829] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "refresh_cache-7740a70f-3c95-49aa-b3ec-0e0effd3efcc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.707080] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "refresh_cache-7740a70f-3c95-49aa-b3ec-0e0effd3efcc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.707255] env[62383]: DEBUG nova.network.neutron [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 842.802217] env[62383]: DEBUG oslo_vmware.api [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Task: {'id': task-2451732, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144504} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.802556] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 842.802855] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 842.803168] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 842.803445] env[62383]: INFO nova.compute.manager [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Took 1.13 seconds to destroy the instance on the hypervisor. [ 842.803797] env[62383]: DEBUG oslo.service.loopingcall [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 842.804096] env[62383]: DEBUG nova.compute.manager [-] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 842.804315] env[62383]: DEBUG nova.network.neutron [-] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 843.051525] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451735, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.061226] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0d992155-24fa-4836-83c9-8f188f7d7efa] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 843.163468] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451730, 'name': ReconfigVM_Task, 'duration_secs': 0.660576} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.163468] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 2eba2920-7912-475b-a198-890743aa5255/2eba2920-7912-475b-a198-890743aa5255.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 843.164717] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfd06a41-ecc8-4d77-bbf1-29ae47ec2ba8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.172694] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 843.172694] env[62383]: value = "task-2451736" [ 843.172694] env[62383]: _type = "Task" [ 843.172694] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.182501] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451736, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.262233] env[62383]: DEBUG nova.network.neutron [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.359862] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.687s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.360684] env[62383]: DEBUG nova.compute.manager [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62383) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 843.363897] env[62383]: DEBUG oslo_concurrency.lockutils [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.054s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.364118] env[62383]: DEBUG oslo_concurrency.lockutils [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.366242] env[62383]: DEBUG oslo_concurrency.lockutils [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.755s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.366438] env[62383]: DEBUG oslo_concurrency.lockutils [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.368233] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.918s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.368422] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.370059] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 12.101s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.423636] env[62383]: INFO nova.scheduler.client.report [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted allocations for instance a68610a6-f684-4cc9-8dd4-8b90d2d379da [ 843.431983] env[62383]: INFO nova.scheduler.client.report [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Deleted allocations for 
instance 12e6baef-0614-4a12-b958-30b0f56fe486 [ 843.442948] env[62383]: INFO nova.scheduler.client.report [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Deleted allocations for instance 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a [ 843.468126] env[62383]: DEBUG nova.network.neutron [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Updating instance_info_cache with network_info: [{"id": "2ff20743-555a-49bd-964f-be249744a686", "address": "fa:16:3e:78:f3:8c", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ff20743-55", "ovs_interfaceid": "2ff20743-555a-49bd-964f-be249744a686", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.552149] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451735, 'name': CreateVM_Task, 'duration_secs': 0.543439} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.552353] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 843.556031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.556031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.556031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 843.556031] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7bc8631-66a1-4159-b41e-de7c6d12cbf3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.560086] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for the task: (returnval){ [ 843.560086] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524ae0a4-b4bf-0d1b-869f-e58c272959bd" [ 843.560086] env[62383]: _type = "Task" [ 843.560086] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.566713] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 184d0caa-85c2-426d-82e5-ac52e525fe74] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 843.572913] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524ae0a4-b4bf-0d1b-869f-e58c272959bd, 'name': SearchDatastore_Task, 'duration_secs': 0.01087} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.574516] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.574516] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.574516] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.574516] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.574858] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.574858] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ee1ec97-318e-4261-9f99-8845b3f9c7af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.583215] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.583408] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 843.584166] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a73167bc-97c4-4935-9981-5b863604b613 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.589991] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for the task: (returnval){ [ 843.589991] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a43ea5-1b6e-fa8e-3473-956e9c172f09" [ 843.589991] env[62383]: _type = "Task" [ 843.589991] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.598208] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a43ea5-1b6e-fa8e-3473-956e9c172f09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.684177] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451736, 'name': Rename_Task, 'duration_secs': 0.267492} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.684454] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.684689] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95b3d11d-c2e8-482c-9aad-9bea5406d888 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.692358] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 843.692358] env[62383]: value = "task-2451737" [ 843.692358] env[62383]: _type = "Task" [ 843.692358] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.703344] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451737, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.814454] env[62383]: DEBUG nova.network.neutron [-] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.914195] env[62383]: INFO nova.scheduler.client.report [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleted allocation for migration de2bbf7b-fb36-4da7-9a39-76edd8e5241b [ 843.943477] env[62383]: DEBUG oslo_concurrency.lockutils [None req-046aa2e0-3beb-42a8-9388-9e168b4849b4 tempest-ServerGroupTestJSON-168598164 tempest-ServerGroupTestJSON-168598164-project-member] Lock "12e6baef-0614-4a12-b958-30b0f56fe486" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.318s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.944660] env[62383]: DEBUG oslo_concurrency.lockutils [None req-efa7b541-4485-458a-b86b-4f9a8c42248d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "a68610a6-f684-4cc9-8dd4-8b90d2d379da" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.666s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.949571] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bcc7aca8-d269-4d68-823a-9036d1bc4c2f tempest-FloatingIPsAssociationTestJSON-1857792412 tempest-FloatingIPsAssociationTestJSON-1857792412-project-member] Lock "3810ae49-3b9d-4c5f-b579-8abddc8d6c1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.558s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.971922] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "refresh_cache-7740a70f-3c95-49aa-b3ec-0e0effd3efcc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.972271] env[62383]: DEBUG nova.compute.manager [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Instance network_info: |[{"id": "2ff20743-555a-49bd-964f-be249744a686", "address": "fa:16:3e:78:f3:8c", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap2ff20743-55", "ovs_interfaceid": "2ff20743-555a-49bd-964f-be249744a686", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 843.972893] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:f3:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5f60c972-a72d-4c5f-a250-faadfd6eafbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ff20743-555a-49bd-964f-be249744a686', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 843.981130] env[62383]: DEBUG oslo.service.loopingcall [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.981578] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 843.984694] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3f545f8-0850-4841-8fea-08716124cd64 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.008616] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.008616] env[62383]: value = "task-2451738" [ 844.008616] env[62383]: _type = "Task" [ 844.008616] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.021771] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451738, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.074138] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 69569fa0-5175-453e-9875-9ef46c723da8] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 844.102475] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a43ea5-1b6e-fa8e-3473-956e9c172f09, 'name': SearchDatastore_Task, 'duration_secs': 0.008671} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.106507] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31012a62-27e9-4543-9e19-333b2a012acb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.113964] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for the task: (returnval){ [ 844.113964] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a01eaa-5be7-ac77-d050-aec13e1b23d4" [ 844.113964] env[62383]: _type = "Task" [ 844.113964] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.128200] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a01eaa-5be7-ac77-d050-aec13e1b23d4, 'name': SearchDatastore_Task, 'duration_secs': 0.011261} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.128308] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.128625] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a04a6a53-cca8-4e15-b840-cb1394e5b188/a04a6a53-cca8-4e15-b840-cb1394e5b188.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 844.128826] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6cc2cdd4-0fe4-4669-8e75-f42deae0d9bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.138963] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for the task: (returnval){ [ 844.138963] env[62383]: value = "task-2451739" [ 844.138963] env[62383]: _type = "Task" [ 844.138963] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.148116] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451739, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.207181] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451737, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.245031] env[62383]: DEBUG nova.compute.manager [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Received event network-vif-plugged-2ff20743-555a-49bd-964f-be249744a686 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 844.245184] env[62383]: DEBUG oslo_concurrency.lockutils [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] Acquiring lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.245368] env[62383]: DEBUG oslo_concurrency.lockutils [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] Lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.245531] env[62383]: DEBUG oslo_concurrency.lockutils [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] Lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.245692] env[62383]: DEBUG nova.compute.manager [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] No waiting events found dispatching network-vif-plugged-2ff20743-555a-49bd-964f-be249744a686 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 844.245886] env[62383]: WARNING nova.compute.manager [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Received unexpected event network-vif-plugged-2ff20743-555a-49bd-964f-be249744a686 for instance with vm_state building and task_state spawning. [ 844.246059] env[62383]: DEBUG nova.compute.manager [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Received event network-changed-2ff20743-555a-49bd-964f-be249744a686 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 844.246214] env[62383]: DEBUG nova.compute.manager [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Refreshing instance network info cache due to event network-changed-2ff20743-555a-49bd-964f-be249744a686. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 844.246391] env[62383]: DEBUG oslo_concurrency.lockutils [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] Acquiring lock "refresh_cache-7740a70f-3c95-49aa-b3ec-0e0effd3efcc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.246520] env[62383]: DEBUG oslo_concurrency.lockutils [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] Acquired lock "refresh_cache-7740a70f-3c95-49aa-b3ec-0e0effd3efcc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.246676] env[62383]: DEBUG nova.network.neutron [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Refreshing network info cache for port 2ff20743-555a-49bd-964f-be249744a686 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 844.317620] env[62383]: INFO nova.compute.manager [-] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Took 1.51 seconds to deallocate network for instance. [ 844.332731] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4eadf03-2fad-42d1-ae13-a77749616bf2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.342356] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c573f7d-9354-4881-9bef-0d6618932f6c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.380182] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3585f9-7643-4c52-9249-949711392acd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.387812] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ebbb0b-053a-43c7-aebd-eb130bd3a827 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.405909] env[62383]: DEBUG nova.compute.provider_tree [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.422206] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25612750-617f-459b-a912-50c792567db2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 28.741s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.521835] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451738, 'name': CreateVM_Task, 'duration_secs': 0.454124} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.522047] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 844.522784] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.522967] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.523329] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 844.523647] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da7466e6-bdf9-4a83-b58d-af04c86cff11 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.530194] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 844.530194] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525edece-f91e-aa0f-fe55-b4ea622dd34a" [ 844.530194] env[62383]: _type = "Task" [ 844.530194] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.539855] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525edece-f91e-aa0f-fe55-b4ea622dd34a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.577383] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: eedadcc7-d02e-4a21-a43a-1dccde81b3b4] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 844.654417] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451739, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49633} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.654417] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a04a6a53-cca8-4e15-b840-cb1394e5b188/a04a6a53-cca8-4e15-b840-cb1394e5b188.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 844.654417] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 844.654417] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1c60b3b-ae26-44e0-abdd-2e20baf20178 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.659911] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for the task: (returnval){ [ 844.659911] env[62383]: value = "task-2451740" [ 844.659911] env[62383]: _type = "Task" [ 844.659911] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.667914] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451740, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.704458] env[62383]: DEBUG oslo_vmware.api [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451737, 'name': PowerOnVM_Task, 'duration_secs': 0.539197} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.704732] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.704940] env[62383]: INFO nova.compute.manager [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Took 8.17 seconds to spawn the instance on the hypervisor. 
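[editor's annotation] The entries above and below repeat one pattern: a vCenter task is started (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task, ...), then oslo_vmware.api polls it, logging "progress is N%" until it reports "completed successfully". The sketch below is a minimal, hypothetical poll loop meant only to make those log lines easier to read; it is not the oslo.vmware implementation, and `session.get_task_info` plus the `TaskInfo` record are assumed stand-ins for the real PropertyCollector reads the driver performs.

```python
import time


class TaskInfo:
    """Assumed record mirroring the fields visible in the log entries:
    a task id, a task name, a state, and a progress percentage."""

    def __init__(self, task_id, name, state, progress):
        self.task_id = task_id
        self.name = name
        self.state = state          # 'running', 'success', or 'error'
        self.progress = progress    # integer percentage


def wait_for_task(session, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it finishes, printing lines shaped like the
    '_poll_task' entries in this log. `session.get_task_info` is an assumed
    helper, not a real oslo.vmware call."""
    while True:
        info = session.get_task_info(task_ref)
        print("Task: {'id': %s, 'name': %s} progress is %d%%."
              % (info.task_id, info.name, info.progress))
        if info.state == 'success':
            print("Task: {'id': %s, 'name': %s} completed successfully."
                  % (info.task_id, info.name))
            return info
        if info.state == 'error':
            raise RuntimeError("Task %s failed" % info.task_id)
        time.sleep(poll_interval)
```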
[ 844.705140] env[62383]: DEBUG nova.compute.manager [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 844.705911] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05c9d09-b5f0-4087-9f60-8b939a60c2d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.824851] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.910240] env[62383]: DEBUG nova.scheduler.client.report [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.972861] env[62383]: DEBUG nova.network.neutron [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Updated VIF entry in instance network info cache for port 2ff20743-555a-49bd-964f-be249744a686. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 844.973237] env[62383]: DEBUG nova.network.neutron [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Updating instance_info_cache with network_info: [{"id": "2ff20743-555a-49bd-964f-be249744a686", "address": "fa:16:3e:78:f3:8c", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ff20743-55", "ovs_interfaceid": "2ff20743-555a-49bd-964f-be249744a686", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.041948] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525edece-f91e-aa0f-fe55-b4ea622dd34a, 'name': SearchDatastore_Task, 'duration_secs': 0.054671} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.042333] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.042585] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.042823] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.042972] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.043187] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.043516] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e975ec4b-4e57-4618-802c-f987650904af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.061520] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.061666] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.062407] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67cf5199-bf66-48ff-b1bc-14a385e5b916 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.068520] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 845.068520] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b6c1bc-229e-4275-9d23-21bdcd2005cb" [ 845.068520] env[62383]: _type = "Task" [ 845.068520] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.077049] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b6c1bc-229e-4275-9d23-21bdcd2005cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.080579] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 67d41910-54e1-48f1-b0d3-f34a62595ef2] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 845.115763] env[62383]: DEBUG nova.objects.instance [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'flavor' on Instance uuid 8a165d96-f503-4bc5-bff4-e6a85201e137 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.169970] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451740, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067735} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.170265] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.171041] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080c1371-a430-47ec-a32f-cb75773c90ac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.196104] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] a04a6a53-cca8-4e15-b840-cb1394e5b188/a04a6a53-cca8-4e15-b840-cb1394e5b188.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.196452] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0780cb5-d909-4771-8d03-672f4cf448b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.227509] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for the task: (returnval){ [ 845.227509] env[62383]: value = "task-2451741" [ 845.227509] env[62383]: _type = "Task" [ 845.227509] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.228554] env[62383]: INFO nova.compute.manager [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Took 43.78 seconds to build instance. [ 845.236358] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451741, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.477104] env[62383]: DEBUG oslo_concurrency.lockutils [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] Releasing lock "refresh_cache-7740a70f-3c95-49aa-b3ec-0e0effd3efcc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.478029] env[62383]: DEBUG nova.compute.manager [req-c23404ca-f524-4ce8-8c60-3f7aca3c8fc7 req-04997dd5-4fae-4e95-9cc7-80da7c11dc9e service nova] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Received event network-vif-deleted-f6691e8a-b0ad-4db1-b2f2-c313f3c50a51 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 845.581767] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b6c1bc-229e-4275-9d23-21bdcd2005cb, 'name': SearchDatastore_Task, 'duration_secs': 0.00968} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.582671] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31bde637-ad8d-4aa6-b78e-6e5c1a075b3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.585461] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: dd0ad4e3-a6e6-4258-b960-544984e24ebc] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 845.592509] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 845.592509] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52538c25-52e9-ce0f-7d9e-4f6527d2a5a0" [ 845.592509] env[62383]: _type = "Task" [ 845.592509] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.607915] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52538c25-52e9-ce0f-7d9e-4f6527d2a5a0, 'name': SearchDatastore_Task, 'duration_secs': 0.010676} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.608260] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.608536] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 7740a70f-3c95-49aa-b3ec-0e0effd3efcc/7740a70f-3c95-49aa-b3ec-0e0effd3efcc.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 845.608881] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29eb8ad0-03b4-45e9-86b0-bec7a5b297a3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.623214] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 845.623214] env[62383]: value = "task-2451742" [ 845.623214] env[62383]: _type = "Task" [ 845.623214] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.624864] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.625042] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.625230] env[62383]: DEBUG nova.network.neutron [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.625405] env[62383]: DEBUG nova.objects.instance [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'info_cache' on Instance uuid 8a165d96-f503-4bc5-bff4-e6a85201e137 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.638525] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451742, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.730768] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4a651feb-dfa8-49fe-8eae-4dd275d40607 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "2eba2920-7912-475b-a198-890743aa5255" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.293s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.735354] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.929141] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.556s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.929727] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.649s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.929845] env[62383]: DEBUG nova.objects.instance [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lazy-loading 'resources' on Instance uuid 6fda89ec-aee1-4c1e-b005-51a9742abb19 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 846.090574] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 14bb9b79-d224-4a64-861e-30dd919c5741] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 846.136024] env[62383]: DEBUG nova.objects.base [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Object Instance<8a165d96-f503-4bc5-bff4-e6a85201e137> lazy-loaded attributes: flavor,info_cache {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 846.140703] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451742, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487434} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.141255] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 7740a70f-3c95-49aa-b3ec-0e0effd3efcc/7740a70f-3c95-49aa-b3ec-0e0effd3efcc.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 846.141480] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 846.141738] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cfa9527f-a553-441c-9fc3-1a3fa208c403 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.150013] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 846.150013] env[62383]: value = "task-2451743" [ 846.150013] env[62383]: _type = "Task" [ 846.150013] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.160209] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451743, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.238881] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451741, 'name': ReconfigVM_Task, 'duration_secs': 0.946812} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.241193] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Reconfigured VM instance instance-00000043 to attach disk [datastore2] a04a6a53-cca8-4e15-b840-cb1394e5b188/a04a6a53-cca8-4e15-b840-cb1394e5b188.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.241193] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-731e7634-0e02-4d36-9d39-652a7526ffd9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.253810] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for the task: (returnval){ [ 846.253810] env[62383]: value = "task-2451744" [ 846.253810] env[62383]: _type = "Task" [ 846.253810] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.264280] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451744, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.507332] env[62383]: INFO nova.scheduler.client.report [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted allocation for migration 88f6ac26-e38f-4ff9-9ba8-0b8c72f3e3ab [ 846.593985] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a170fd95-3f7f-4315-a063-b9d02a7a1af4] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 846.661196] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451743, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069614} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.661488] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 846.662420] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb88b89-d5b0-4f1a-96aa-9cd1f4d14223 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.693174] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 7740a70f-3c95-49aa-b3ec-0e0effd3efcc/7740a70f-3c95-49aa-b3ec-0e0effd3efcc.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 846.695892] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-284ef563-8623-4c13-9720-e6da59e76893 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.720704] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 846.720704] env[62383]: value = "task-2451745" [ 846.720704] env[62383]: _type = "Task" [ 846.720704] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.730952] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451745, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.770947] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451744, 'name': Rename_Task, 'duration_secs': 0.267235} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.773893] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.774303] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0545b2f4-dc21-414b-891a-9b2f9388f979 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.781790] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for the task: (returnval){ [ 846.781790] env[62383]: value = "task-2451746" [ 846.781790] env[62383]: _type = "Task" [ 846.781790] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.795018] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451746, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.953539] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3126db-ea0e-4576-b9cf-3a9f519d7d69 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.957460] env[62383]: DEBUG nova.network.neutron [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance_info_cache with network_info: [{"id": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "address": "fa:16:3e:ab:2f:e4", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap925071ab-96", "ovs_interfaceid": "925071ab-96dd-4c80-901e-9dba6c4a5a9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
846.963948] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63edc23f-b559-4973-aede-7755f5119682 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.001439] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737f500c-7ca0-4da5-b794-acbf232ff346 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.012969] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4477b2a8-1f25-4a27-ad87-ee689419bddc tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "93234e99-268f-491e-96bd-a77f4c9f164b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 18.666s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.018155] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0067ad-406d-4ac6-b45a-a254aaa59e6a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.035069] env[62383]: DEBUG nova.compute.provider_tree [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 847.098993] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 571a5250-8655-4f30-b193-919affbc1bd8] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 847.233808] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451745, 'name': ReconfigVM_Task, 'duration_secs': 0.466929} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.234360] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 7740a70f-3c95-49aa-b3ec-0e0effd3efcc/7740a70f-3c95-49aa-b3ec-0e0effd3efcc.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 847.234797] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d700181f-d9f8-46c0-9122-8bb4747006e0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.243689] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 847.243689] env[62383]: value = "task-2451747" [ 847.243689] env[62383]: _type = "Task" [ 847.243689] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.254136] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451747, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.294683] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451746, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.460330] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-8a165d96-f503-4bc5-bff4-e6a85201e137" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.559884] env[62383]: ERROR nova.scheduler.client.report [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [req-046594fc-466a-4cad-866c-c40a693b281c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-046594fc-466a-4cad-866c-c40a693b281c"}]} [ 847.580463] env[62383]: DEBUG nova.scheduler.client.report [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 847.597260] env[62383]: DEBUG nova.scheduler.client.report [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 847.597901] env[62383]: DEBUG nova.compute.provider_tree [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 847.603124] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 9659a2dd-f1da-4a8e-a740-1ec01f96940c] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 847.608945] env[62383]: DEBUG nova.scheduler.client.report [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 847.635893] env[62383]: DEBUG nova.scheduler.client.report [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 847.756601] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 
tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451747, 'name': Rename_Task, 'duration_secs': 0.213551} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.760124] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.760124] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04aa288b-6823-48d2-a95f-ab4f6e18880a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.768190] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 847.768190] env[62383]: value = "task-2451748" [ 847.768190] env[62383]: _type = "Task" [ 847.768190] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.780117] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451748, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.825863] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451746, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.869551] env[62383]: DEBUG nova.objects.instance [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lazy-loading 'flavor' on Instance uuid 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 848.013910] env[62383]: DEBUG nova.compute.manager [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.017299] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95515fb8-da94-4e2b-b6e4-f82ea53679b3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.107598] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: ab338058-13c8-4df9-ba55-fabe1952557d] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 848.189934] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5e1337-bb25-4881-8e1e-3fb6ecb29ad8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.201029] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7860c5-5259-42a5-a1e9-81fd87a4993e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.261306] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3fe059-553f-458d-9d6f-1b123017f793 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.281619] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee11c8ce-6fc9-4c5f-bcbf-1c6cfbcc0806 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.300064] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451748, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.313308] env[62383]: DEBUG nova.compute.provider_tree [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 848.319034] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451746, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.376510] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquiring lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.376805] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquired lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.466361] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 848.466703] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae605d32-6902-4da4-956f-31d852d8f4aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.475251] env[62383]: DEBUG oslo_vmware.api [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 848.475251] env[62383]: value = "task-2451749" [ 848.475251] env[62383]: _type = "Task" [ 848.475251] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.484155] env[62383]: DEBUG oslo_vmware.api [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451749, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.530742] env[62383]: INFO nova.compute.manager [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] instance snapshotting [ 848.538369] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9b82fb-a885-4971-86f2-02897d603034 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.557785] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-541c90f0-1f08-4ac7-9544-b22cc2c16413 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.613792] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8a2b209c-423c-446c-a769-f7d7820d46da] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 848.783047] env[62383]: DEBUG oslo_vmware.api [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451748, 'name': PowerOnVM_Task, 'duration_secs': 0.867599} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.783751] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 848.784027] env[62383]: INFO nova.compute.manager [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Took 7.09 seconds to spawn the instance on the hypervisor. [ 848.784254] env[62383]: DEBUG nova.compute.manager [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.785237] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddeb358c-c713-4d2b-aeb1-fd1d31266709 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.812059] env[62383]: DEBUG oslo_vmware.api [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451746, 'name': PowerOnVM_Task, 'duration_secs': 1.988121} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.812843] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 848.815081] env[62383]: INFO nova.compute.manager [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Took 9.63 seconds to spawn the instance on the hypervisor. [ 848.815081] env[62383]: DEBUG nova.compute.manager [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.815081] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b12b6b4-9179-41a1-ac6a-30b75eb96992 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.865753] env[62383]: DEBUG nova.scheduler.client.report [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 98 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 848.866766] env[62383]: DEBUG nova.compute.provider_tree [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 98 to 99 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 848.867034] env[62383]: DEBUG nova.compute.provider_tree [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 848.986522] env[62383]: DEBUG oslo_vmware.api [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 
tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451749, 'name': PowerOnVM_Task, 'duration_secs': 0.463243} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.986937] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 848.987263] env[62383]: DEBUG nova.compute.manager [None req-1b56107f-a802-4221-895d-1fa15fc63b8f tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 848.988293] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3913f1c-cd30-4ea1-af53-69b6abf98f21 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.064422] env[62383]: DEBUG nova.network.neutron [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 849.066202] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "93234e99-268f-491e-96bd-a77f4c9f164b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.066202] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "93234e99-268f-491e-96bd-a77f4c9f164b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.066400] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "93234e99-268f-491e-96bd-a77f4c9f164b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.069253] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "93234e99-268f-491e-96bd-a77f4c9f164b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.069253] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "93234e99-268f-491e-96bd-a77f4c9f164b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.003s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.072269] env[62383]: INFO nova.compute.manager [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Terminating instance [ 849.078986] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 849.079221] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-589aa07a-0f57-478c-9ce6-1e065db1e875 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.088499] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 849.088499] env[62383]: value = "task-2451750" [ 849.088499] env[62383]: _type = "Task" [ 849.088499] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.103577] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451750, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.117404] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8e911bad-5408-4588-9865-912ce4457d34] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 849.146993] env[62383]: DEBUG nova.compute.manager [req-0556a96f-7c94-4155-8cd9-206726456ef6 req-161efe04-ac11-4369-9a47-71cd98cb7a83 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Received event network-changed-c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 849.147323] env[62383]: DEBUG nova.compute.manager [req-0556a96f-7c94-4155-8cd9-206726456ef6 req-161efe04-ac11-4369-9a47-71cd98cb7a83 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Refreshing instance network info cache due to event network-changed-c0a30947-ef63-4154-9495-4bb92c6a0578. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 849.147567] env[62383]: DEBUG oslo_concurrency.lockutils [req-0556a96f-7c94-4155-8cd9-206726456ef6 req-161efe04-ac11-4369-9a47-71cd98cb7a83 service nova] Acquiring lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.314027] env[62383]: INFO nova.compute.manager [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Took 31.54 seconds to build instance. [ 849.336210] env[62383]: INFO nova.compute.manager [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Took 46.12 seconds to build instance. [ 849.373725] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.444s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.376362] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.638s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.378140] env[62383]: INFO nova.compute.claims [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.397587] env[62383]: INFO nova.scheduler.client.report [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Deleted allocations for instance 6fda89ec-aee1-4c1e-b005-51a9742abb19 [ 849.578586] env[62383]: DEBUG nova.compute.manager [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 849.578919] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 849.579939] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95d8fc3-53a8-4c1f-b2df-b7a7decf8e58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.591149] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 849.595010] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4f7d7eb-174e-450e-ba22-7b535eccfb6f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.603114] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451750, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.604523] env[62383]: DEBUG oslo_vmware.api [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 849.604523] env[62383]: value = "task-2451751" [ 849.604523] env[62383]: _type = "Task" [ 849.604523] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.618481] env[62383]: DEBUG oslo_vmware.api [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451751, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.622114] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 7b8c8c12-fcf3-4b54-ae22-3aead1344803] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 849.780241] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "81921762-ac51-42d2-83dc-d5b6e904fbb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.780241] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.815928] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8d298b57-764a-4755-8e7e-71dd563f9b4b tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.059s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.838180] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c556c9cf-553f-4790-abbd-eb6879356bb9 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lock "a04a6a53-cca8-4e15-b840-cb1394e5b188" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.628s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.891175] env[62383]: DEBUG nova.network.neutron [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updating instance_info_cache with network_info: [{"id": "c0a30947-ef63-4154-9495-4bb92c6a0578", "address": "fa:16:3e:ca:da:18", "network": {"id": "6eb819a0-f2d3-4b88-a271-99bbdfdb2f52", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-359095630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "89a4cd88e497492da719341b40576b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a30947-ef", "ovs_interfaceid": "c0a30947-ef63-4154-9495-4bb92c6a0578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.904518] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ed9ed19c-60b0-4970-8960-9c1a5f68136b tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "6fda89ec-aee1-4c1e-b005-51a9742abb19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.675s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.105796] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451750, 'name': CreateSnapshot_Task, 'duration_secs': 0.741281} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.110719] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 850.117271] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc821f8e-f0ca-4ccf-83b9-414ca839d419 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.125661] env[62383]: DEBUG oslo_vmware.api [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451751, 'name': PowerOffVM_Task, 'duration_secs': 0.346771} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.129795] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.130077] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 850.132469] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.132978] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Cleaning up deleted instances with incomplete migration {{(pid=62383) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 850.135630] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c2b76216-6211-4fbb-a673-e6d8164ec614 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.217199] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.217433] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.217632] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleting the datastore file [datastore2] 93234e99-268f-491e-96bd-a77f4c9f164b {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.217903] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09d2e4ef-7129-4b66-8c64-770d5cb9c1d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.225515] env[62383]: DEBUG oslo_vmware.api [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 850.225515] env[62383]: value = "task-2451753" [ 850.225515] env[62383]: _type = "Task" [ 850.225515] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.235203] env[62383]: DEBUG oslo_vmware.api [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451753, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.281803] env[62383]: DEBUG nova.compute.manager [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 850.395609] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Releasing lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.395943] env[62383]: DEBUG nova.compute.manager [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Inject network info {{(pid=62383) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 850.396261] env[62383]: DEBUG nova.compute.manager [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] network_info to inject: |[{"id": "c0a30947-ef63-4154-9495-4bb92c6a0578", "address": "fa:16:3e:ca:da:18", "network": {"id": "6eb819a0-f2d3-4b88-a271-99bbdfdb2f52", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-359095630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "89a4cd88e497492da719341b40576b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a30947-ef", "ovs_interfaceid": "c0a30947-ef63-4154-9495-4bb92c6a0578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 850.403917] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 
57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Reconfiguring VM instance to set the machine id {{(pid=62383) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 850.405808] env[62383]: DEBUG oslo_concurrency.lockutils [req-0556a96f-7c94-4155-8cd9-206726456ef6 req-161efe04-ac11-4369-9a47-71cd98cb7a83 service nova] Acquired lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.406171] env[62383]: DEBUG nova.network.neutron [req-0556a96f-7c94-4155-8cd9-206726456ef6 req-161efe04-ac11-4369-9a47-71cd98cb7a83 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Refreshing network info cache for port c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.408067] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00784fc1-a9bd-485e-9c09-2f9a6b5f9fa8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.428149] env[62383]: DEBUG oslo_vmware.api [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 850.428149] env[62383]: value = "task-2451754" [ 850.428149] env[62383]: _type = "Task" [ 850.428149] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.442166] env[62383]: DEBUG oslo_vmware.api [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451754, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.474311] env[62383]: DEBUG nova.objects.instance [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lazy-loading 'flavor' on Instance uuid 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 850.621359] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "8a165d96-f503-4bc5-bff4-e6a85201e137" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.622181] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.622571] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "8a165d96-f503-4bc5-bff4-e6a85201e137-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.622888] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.623185] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.626940] env[62383]: INFO nova.compute.manager [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Terminating instance [ 850.651125] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 850.652388] env[62383]: DEBUG oslo_service.periodic_task [None 
req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.653437] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b994a331-74a0-4f08-ab2a-1b04ca71659a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.667649] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 850.667649] env[62383]: value = "task-2451755" [ 850.667649] env[62383]: _type = "Task" [ 850.667649] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.681461] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451755, 'name': CloneVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.743744] env[62383]: DEBUG oslo_vmware.api [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451753, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150597} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.744017] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 850.744214] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 850.744439] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 850.744645] env[62383]: INFO nova.compute.manager [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 850.744890] env[62383]: DEBUG oslo.service.loopingcall [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.747744] env[62383]: DEBUG nova.compute.manager [-] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 850.747744] env[62383]: DEBUG nova.network.neutron [-] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 850.814371] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.874597] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac1400d-7a2d-4f53-8931-539a91acffbb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.884201] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3c90d1-560e-4128-b8f3-537b3b3291f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.919021] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4471a17-6adb-4480-be0a-ace94db8ac41 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.929216] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9d726c-280f-4c73-b44b-43a4b05df27c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.948020] env[62383]: DEBUG nova.compute.manager [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 850.948020] env[62383]: DEBUG oslo_vmware.api [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451754, 'name': ReconfigVM_Task, 'duration_secs': 0.25384} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.955587] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155cd56b-aa8e-4a06-9eac-1daab9f13994 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.957864] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e65088dc-2d2d-4eda-a680-6a22b349e187 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Reconfigured VM instance to set the machine id {{(pid=62383) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 850.964101] env[62383]: DEBUG nova.compute.provider_tree [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 850.984101] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquiring lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.059190] env[62383]: DEBUG oslo_concurrency.lockutils [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquiring lock "a04a6a53-cca8-4e15-b840-cb1394e5b188" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.059563] env[62383]: DEBUG oslo_concurrency.lockutils [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lock "a04a6a53-cca8-4e15-b840-cb1394e5b188" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.059694] env[62383]: DEBUG oslo_concurrency.lockutils [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquiring lock "a04a6a53-cca8-4e15-b840-cb1394e5b188-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.059895] env[62383]: DEBUG oslo_concurrency.lockutils [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 
tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lock "a04a6a53-cca8-4e15-b840-cb1394e5b188-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.060306] env[62383]: DEBUG oslo_concurrency.lockutils [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lock "a04a6a53-cca8-4e15-b840-cb1394e5b188-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.062503] env[62383]: INFO nova.compute.manager [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Terminating instance [ 851.138286] env[62383]: DEBUG nova.compute.manager [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 851.138884] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 851.140978] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3babc628-f7dc-4d56-9313-3c9d55ba7b28 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.151757] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 851.151757] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75d40478-a75c-40ca-83fb-bf777f878c5b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.161162] env[62383]: DEBUG oslo_vmware.api [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 851.161162] env[62383]: value = "task-2451756" [ 851.161162] env[62383]: _type = "Task" [ 851.161162] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.174396] env[62383]: DEBUG oslo_vmware.api [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451756, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.186219] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451755, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.250076] env[62383]: DEBUG nova.network.neutron [req-0556a96f-7c94-4155-8cd9-206726456ef6 req-161efe04-ac11-4369-9a47-71cd98cb7a83 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updated VIF entry in instance network info cache for port c0a30947-ef63-4154-9495-4bb92c6a0578. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 851.250471] env[62383]: DEBUG nova.network.neutron [req-0556a96f-7c94-4155-8cd9-206726456ef6 req-161efe04-ac11-4369-9a47-71cd98cb7a83 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updating instance_info_cache with network_info: [{"id": "c0a30947-ef63-4154-9495-4bb92c6a0578", "address": "fa:16:3e:ca:da:18", "network": {"id": "6eb819a0-f2d3-4b88-a271-99bbdfdb2f52", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-359095630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "89a4cd88e497492da719341b40576b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a30947-ef", "ovs_interfaceid": "c0a30947-ef63-4154-9495-4bb92c6a0578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.359935] env[62383]: DEBUG nova.compute.manager [req-e610ad90-ca0f-487b-96db-f1ada0e69e0b req-120f88f3-6f7a-4544-b119-a3f258170f6b service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Received event network-vif-deleted-79458cb2-668a-4c04-882f-c00f465ccd9d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 851.360221] env[62383]: INFO nova.compute.manager [req-e610ad90-ca0f-487b-96db-f1ada0e69e0b req-120f88f3-6f7a-4544-b119-a3f258170f6b service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Neutron deleted interface 79458cb2-668a-4c04-882f-c00f465ccd9d; detaching it from the instance and deleting it from the info cache [ 851.360720] env[62383]: DEBUG nova.network.neutron [req-e610ad90-ca0f-487b-96db-f1ada0e69e0b req-120f88f3-6f7a-4544-b119-a3f258170f6b service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 851.476518] env[62383]: INFO nova.compute.manager [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] instance snapshotting [ 851.482017] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757b0266-4e9b-437e-ab17-9fe66fa7740c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.501694] env[62383]: ERROR nova.scheduler.client.report [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [req-fcb92515-f034-42c2-9354-d79c6496d197] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fcb92515-f034-42c2-9354-d79c6496d197"}]} [ 851.502996] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da44acc-336e-4f9b-8142-4d64ec146162 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.526355] env[62383]: DEBUG nova.scheduler.client.report [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 851.553239] env[62383]: DEBUG nova.scheduler.client.report [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 851.553239] env[62383]: DEBUG nova.compute.provider_tree [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 144, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 851.569192] env[62383]: DEBUG nova.compute.manager [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 851.569192] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 851.569192] env[62383]: DEBUG nova.scheduler.client.report [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 851.573543] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4873c3f-a58a-49a2-98a9-d4227981e3b3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.578455] env[62383]: DEBUG nova.network.neutron [-] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.585660] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 851.585964] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c485e96-ecf7-4361-801d-67c94bcb3525 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.597763] env[62383]: DEBUG oslo_vmware.api [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for the task: (returnval){ [ 851.597763] env[62383]: value = "task-2451757" [ 851.597763] env[62383]: _type = "Task" [ 851.597763] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.604363] env[62383]: DEBUG nova.scheduler.client.report [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 851.612983] env[62383]: DEBUG oslo_vmware.api [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451757, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.677461] env[62383]: DEBUG oslo_vmware.api [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451756, 'name': PowerOffVM_Task, 'duration_secs': 0.381504} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.681558] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 851.681897] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 851.683060] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-165fd9cf-6bd4-413a-bdcb-7d5eb7c60028 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.694687] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451755, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.752759] env[62383]: DEBUG oslo_concurrency.lockutils [req-0556a96f-7c94-4155-8cd9-206726456ef6 req-161efe04-ac11-4369-9a47-71cd98cb7a83 service nova] Releasing lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.753567] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquired lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.802484] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 851.802484] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 851.802484] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleting the datastore file [datastore2] 8a165d96-f503-4bc5-bff4-e6a85201e137 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 851.802484] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb3576ea-3df2-4245-a50c-72cc7f7927e1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.818645] env[62383]: DEBUG oslo_vmware.api [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 851.818645] env[62383]: value = "task-2451759" [ 851.818645] env[62383]: _type = "Task" [ 851.818645] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.836175] env[62383]: DEBUG oslo_vmware.api [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451759, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.864626] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af0929d1-c58b-4f56-91ea-2e3b16e5f423 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.880047] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41af182f-6170-4225-bf5f-8948e91daaa1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.925452] env[62383]: DEBUG nova.compute.manager [req-e610ad90-ca0f-487b-96db-f1ada0e69e0b req-120f88f3-6f7a-4544-b119-a3f258170f6b service nova] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Detach interface failed, port_id=79458cb2-668a-4c04-882f-c00f465ccd9d, reason: Instance 93234e99-268f-491e-96bd-a77f4c9f164b could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 852.020655] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 852.021169] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cebff323-fac5-4ef7-85e7-29350ffbc87f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.033029] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 852.033029] env[62383]: value = "task-2451760" [ 852.033029] env[62383]: _type = "Task" [ 852.033029] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.047055] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451760, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.049945] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af378079-76f2-45a7-b659-0084af4206a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.059562] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15032137-352d-499b-bd27-e0cb57d46a12 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.106908] env[62383]: INFO nova.compute.manager [-] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Took 1.36 seconds to deallocate network for instance. 
[ 852.113703] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7842302f-feba-44c5-8671-bb5b5dd2a164 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.120884] env[62383]: DEBUG nova.network.neutron [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.130763] env[62383]: DEBUG oslo_vmware.api [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451757, 'name': PowerOffVM_Task, 'duration_secs': 0.274015} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.132855] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456a7211-29f0-42d9-9b9c-fccdececac93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.139606] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 852.139606] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 852.139606] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92045e4e-986d-4d11-a31f-3ec611431867 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.157020] env[62383]: DEBUG nova.compute.provider_tree [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 852.187595] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451755, 'name': CloneVM_Task, 'duration_secs': 1.202756} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.187926] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Created linked-clone VM from snapshot [ 852.188725] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4170f9-d0f8-4482-8e9f-5fca5bb387fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.197868] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Uploading image 8e2c3dcb-aaa3-4de4-975d-80766931b380 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 852.212928] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 852.213200] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3d7b841d-04c1-4d00-afc4-e171672e445c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.221350] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 852.221350] env[62383]: value = "task-2451762" [ 852.221350] env[62383]: _type = "Task" [ 852.221350] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.223144] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 852.223271] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 852.223456] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Deleting the datastore file [datastore2] a04a6a53-cca8-4e15-b840-cb1394e5b188 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.227608] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3011b2cd-7abf-4eff-9724-ce1c20f5c8d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.236875] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451762, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.238671] env[62383]: DEBUG oslo_vmware.api [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for the task: (returnval){ [ 852.238671] env[62383]: value = "task-2451763" [ 852.238671] env[62383]: _type = "Task" [ 852.238671] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.249376] env[62383]: DEBUG oslo_vmware.api [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451763, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.332099] env[62383]: DEBUG oslo_vmware.api [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451759, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15002} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.332339] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 852.332549] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 852.332735] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 852.332936] env[62383]: INFO nova.compute.manager [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Took 1.19 seconds to destroy the instance on the hypervisor. [ 852.333218] env[62383]: DEBUG oslo.service.loopingcall [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.333450] env[62383]: DEBUG nova.compute.manager [-] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 852.333521] env[62383]: DEBUG nova.network.neutron [-] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 852.532404] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.532871] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.533230] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.533803] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.533927] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.541136] env[62383]: INFO nova.compute.manager [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Terminating instance [ 852.551557] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451760, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.623762] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.699944] env[62383]: DEBUG nova.scheduler.client.report [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 852.700328] env[62383]: DEBUG nova.compute.provider_tree [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 100 to 101 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 852.700521] env[62383]: DEBUG nova.compute.provider_tree [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 852.741599] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451762, 'name': Destroy_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.751259] env[62383]: DEBUG oslo_vmware.api [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Task: {'id': task-2451763, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143341} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.751591] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 852.751783] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 852.751972] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 852.752311] env[62383]: INFO nova.compute.manager [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Took 1.19 seconds to destroy the instance on the hypervisor. [ 852.752400] env[62383]: DEBUG oslo.service.loopingcall [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.752929] env[62383]: DEBUG nova.compute.manager [-] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 852.753050] env[62383]: DEBUG nova.network.neutron [-] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 853.005901] env[62383]: DEBUG nova.network.neutron [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updating instance_info_cache with network_info: [{"id": "c0a30947-ef63-4154-9495-4bb92c6a0578", "address": "fa:16:3e:ca:da:18", "network": {"id": "6eb819a0-f2d3-4b88-a271-99bbdfdb2f52", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-359095630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "89a4cd88e497492da719341b40576b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a30947-ef", "ovs_interfaceid": "c0a30947-ef63-4154-9495-4bb92c6a0578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.049352] env[62383]: DEBUG nova.compute.manager [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 853.049595] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 853.050401] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451760, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.051367] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0cf2ad4-7d09-49be-9007-34d1d1702ae8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.062427] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 853.062515] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eba5a163-248d-4503-924c-deb1bf7cd63f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.070667] env[62383]: DEBUG oslo_vmware.api [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 853.070667] env[62383]: value = "task-2451764" [ 853.070667] env[62383]: _type = "Task" [ 853.070667] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.081018] env[62383]: DEBUG oslo_vmware.api [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451764, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.192645] env[62383]: DEBUG nova.compute.manager [req-95fe3afb-b452-488d-9288-996d6eb56edd req-58c48994-29c9-4c05-9803-ca0e7ef26bf6 service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Received event network-vif-deleted-ea141edc-2c5f-4ccb-9af1-fe4caec1c754 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 853.192645] env[62383]: INFO nova.compute.manager [req-95fe3afb-b452-488d-9288-996d6eb56edd req-58c48994-29c9-4c05-9803-ca0e7ef26bf6 service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Neutron deleted interface ea141edc-2c5f-4ccb-9af1-fe4caec1c754; detaching it from the instance and deleting it from the info cache [ 853.192645] env[62383]: DEBUG nova.network.neutron [req-95fe3afb-b452-488d-9288-996d6eb56edd req-58c48994-29c9-4c05-9803-ca0e7ef26bf6 service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.208216] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.832s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.208847] env[62383]: DEBUG nova.compute.manager [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Start building networks asynchronously for 
instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 853.211682] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.806s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.212067] env[62383]: DEBUG nova.objects.instance [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lazy-loading 'resources' on Instance uuid 9604eadf-a027-46dd-989b-0d4b752f883a {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.238203] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451762, 'name': Destroy_Task, 'duration_secs': 0.798862} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.238604] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Destroyed the VM [ 853.240868] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 853.241741] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b059492e-f14a-4081-996f-c9c640ce4f4c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.251763] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 853.251763] env[62383]: value = "task-2451765" [ 853.251763] env[62383]: _type = "Task" [ 853.251763] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.261477] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451765, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.401146] env[62383]: DEBUG nova.compute.manager [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Received event network-changed-c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 853.401423] env[62383]: DEBUG nova.compute.manager [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Refreshing instance network info cache due to event network-changed-c0a30947-ef63-4154-9495-4bb92c6a0578. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 853.401626] env[62383]: DEBUG oslo_concurrency.lockutils [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] Acquiring lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.463490] env[62383]: DEBUG nova.network.neutron [-] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.508908] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Releasing lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.509239] env[62383]: DEBUG nova.compute.manager [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Inject network info {{(pid=62383) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 853.509556] env[62383]: DEBUG nova.compute.manager [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] network_info to inject: |[{"id": "c0a30947-ef63-4154-9495-4bb92c6a0578", "address": "fa:16:3e:ca:da:18", "network": {"id": "6eb819a0-f2d3-4b88-a271-99bbdfdb2f52", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-359095630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "89a4cd88e497492da719341b40576b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a30947-ef", "ovs_interfaceid": 
"c0a30947-ef63-4154-9495-4bb92c6a0578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 853.518032] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Reconfiguring VM instance to set the machine id {{(pid=62383) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 853.518032] env[62383]: DEBUG oslo_concurrency.lockutils [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] Acquired lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.518032] env[62383]: DEBUG nova.network.neutron [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Refreshing network info cache for port c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.518032] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f83de317-e7f0-4ec6-9d4a-411ebeaf0a25 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.542287] env[62383]: DEBUG oslo_vmware.api [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 853.542287] env[62383]: value = "task-2451766" [ 853.542287] env[62383]: _type = "Task" [ 853.542287] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.549058] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451760, 'name': CreateSnapshot_Task, 'duration_secs': 1.200788} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.549781] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 853.550594] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62ba704-fff5-4d68-a447-a3255bfc302f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.556988] env[62383]: DEBUG oslo_vmware.api [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451766, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.582103] env[62383]: DEBUG oslo_vmware.api [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451764, 'name': PowerOffVM_Task, 'duration_secs': 0.467028} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.582414] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 853.582654] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 853.582962] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f12cf76-89d1-4515-bf5c-d91f28717dfb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.636701] env[62383]: DEBUG nova.network.neutron [-] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.694887] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e816725-2a95-4e2b-94d3-1b361cc3d5b3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.707389] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e8cdd0-c020-41b0-bfa4-de4002070829 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.720847] env[62383]: DEBUG nova.compute.utils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 853.722740] env[62383]: DEBUG nova.compute.manager [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 853.722740] env[62383]: DEBUG nova.network.neutron [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 853.769319] env[62383]: DEBUG nova.compute.manager [req-95fe3afb-b452-488d-9288-996d6eb56edd req-58c48994-29c9-4c05-9803-ca0e7ef26bf6 service nova] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Detach interface failed, port_id=ea141edc-2c5f-4ccb-9af1-fe4caec1c754, reason: Instance a04a6a53-cca8-4e15-b840-cb1394e5b188 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 853.783072] env[62383]: DEBUG nova.policy [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db7e9998210e485fa855f0375f63ad55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35016a724e7e4fa2b0fc19396d8e736b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 853.789066] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451765, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.891412] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 853.891721] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 853.891928] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Deleting the datastore file [datastore1] 5ef22e87-f73c-47ba-b925-2bd2effe74eb {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 853.892374] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f908b6c3-3e0c-4c9d-8d50-bc877d028f30 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.906018] env[62383]: DEBUG oslo_vmware.api [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for the task: (returnval){ [ 853.906018] env[62383]: value = "task-2451768" [ 853.906018] env[62383]: _type = "Task" [ 853.906018] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.919400] env[62383]: DEBUG oslo_vmware.api [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451768, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.968422] env[62383]: INFO nova.compute.manager [-] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Took 1.63 seconds to deallocate network for instance. [ 853.999571] env[62383]: DEBUG nova.network.neutron [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updated VIF entry in instance network info cache for port c0a30947-ef63-4154-9495-4bb92c6a0578. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 853.999571] env[62383]: DEBUG nova.network.neutron [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updating instance_info_cache with network_info: [{"id": "c0a30947-ef63-4154-9495-4bb92c6a0578", "address": "fa:16:3e:ca:da:18", "network": {"id": "6eb819a0-f2d3-4b88-a271-99bbdfdb2f52", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-359095630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.229", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "89a4cd88e497492da719341b40576b18", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "158692b5-b9fb-49e8-9903-e742ffd6c168", "external-id": "nsx-vlan-transportzone-769", "segmentation_id": 769, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0a30947-ef", "ovs_interfaceid": "c0a30947-ef63-4154-9495-4bb92c6a0578", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.065875] env[62383]: DEBUG oslo_vmware.api [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451766, 'name': ReconfigVM_Task, 'duration_secs': 0.16974} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.066192] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d306b01-2bf8-4f4a-8f0b-9b427e813d1c tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Reconfigured VM instance to set the machine id {{(pid=62383) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 854.086806] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 854.086806] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-83a081ad-8c0b-4bb5-a317-a78629a8e1c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.096979] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 854.096979] env[62383]: value = "task-2451769" [ 854.096979] env[62383]: _type = "Task" [ 854.096979] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.115258] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451769, 'name': CloneVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.143054] env[62383]: INFO nova.compute.manager [-] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Took 1.39 seconds to deallocate network for instance. [ 854.206107] env[62383]: DEBUG nova.network.neutron [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Successfully created port: 4f32c615-7f5d-420b-b198-2f48895523d6 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 854.226591] env[62383]: DEBUG nova.compute.manager [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 854.284416] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451765, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.304615] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab0e5bd-6c47-4c0d-a154-883f67bcf7c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.313163] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bffeffa-ab09-43d5-8a58-03fae577244d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.356430] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquiring lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.356798] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.357139] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquiring lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.357490] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.357731] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.360657] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b852be3-b1ed-47f9-b218-4e69a3912ab9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.364217] env[62383]: INFO nova.compute.manager [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Terminating instance [ 854.374162] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c84bf4-f2fb-4447-bc7f-b5a7ed501e0e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.392184] env[62383]: DEBUG nova.compute.provider_tree [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.415140] env[62383]: DEBUG oslo_vmware.api [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Task: {'id': task-2451768, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162163} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.415411] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 854.415638] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 854.415832] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 854.416018] env[62383]: INFO nova.compute.manager [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Took 1.37 seconds to destroy the instance on the hypervisor. [ 854.417176] env[62383]: DEBUG oslo.service.loopingcall [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.417176] env[62383]: DEBUG nova.compute.manager [-] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 854.417176] env[62383]: DEBUG nova.network.neutron [-] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 854.486578] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.506149] env[62383]: DEBUG oslo_concurrency.lockutils [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] Releasing lock "refresh_cache-57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.506440] env[62383]: DEBUG nova.compute.manager [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Received event network-vif-deleted-925071ab-96dd-4c80-901e-9dba6c4a5a9c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 854.506696] env[62383]: INFO nova.compute.manager [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Neutron deleted interface 925071ab-96dd-4c80-901e-9dba6c4a5a9c; detaching it from the instance and deleting it from the info cache [ 854.506837] env[62383]: DEBUG nova.network.neutron [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.610525] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451769, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.616255] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquiring lock "40719661-5955-48ec-b289-b37896dd04df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.616528] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lock "40719661-5955-48ec-b289-b37896dd04df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.657388] env[62383]: DEBUG oslo_concurrency.lockutils [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.783571] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451765, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.876981] env[62383]: DEBUG nova.compute.manager [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 854.876981] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 854.876981] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61abc5cf-5f05-4661-aad0-671a1e31d3d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.885411] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.885676] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d03e207a-247b-4962-8a6c-5b43b08f72dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.897023] env[62383]: DEBUG oslo_vmware.api [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 854.897023] env[62383]: value = "task-2451770" [ 854.897023] env[62383]: _type = "Task" [ 854.897023] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.897683] env[62383]: DEBUG nova.scheduler.client.report [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 854.910321] env[62383]: DEBUG oslo_vmware.api [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451770, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.010472] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-778ac3f6-6716-4bb6-a500-6eab34d2bc98 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.027480] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69b5fa2-b01e-4fb8-808d-b3f5ff267095 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.068857] env[62383]: DEBUG nova.compute.manager [req-4697dbf3-1120-4fdc-9029-3dab318efd5e req-a4d400b3-dbff-431c-9e10-4ac477542bf0 service nova] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Detach interface failed, port_id=925071ab-96dd-4c80-901e-9dba6c4a5a9c, reason: Instance 8a165d96-f503-4bc5-bff4-e6a85201e137 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 855.106200] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451769, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.119150] env[62383]: DEBUG nova.compute.manager [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 855.224436] env[62383]: DEBUG nova.network.neutron [-] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.240702] env[62383]: DEBUG nova.compute.manager [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 855.272099] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 855.272372] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.272617] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 855.272715] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.272934] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 855.273083] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 855.273318] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 855.273483] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 855.273660] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] 
Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 855.273824] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 855.274053] env[62383]: DEBUG nova.virt.hardware [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 855.275375] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b2431f-2049-4206-a0b6-e9bda822e097 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.283896] env[62383]: DEBUG nova.compute.manager [req-a01738a5-cdc5-4f32-a70d-3e4becdc85bc req-c0a7392d-6a4e-4fd1-b09b-5172176a3f6c service nova] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Received event network-vif-deleted-d2a0fa64-9d35-4dfd-8124-3099b780d7b7 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 855.293698] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5467d3f0-e059-4a31-930c-2485ed62459e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.298436] env[62383]: DEBUG oslo_vmware.api [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451765, 'name': RemoveSnapshot_Task, 'duration_secs': 1.894395} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.298693] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 855.403850] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.192s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.406511] env[62383]: DEBUG oslo_vmware.api [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451770, 'name': PowerOffVM_Task, 'duration_secs': 0.199735} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.407071] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.779s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.407289] env[62383]: DEBUG nova.objects.instance [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lazy-loading 'resources' on Instance uuid da16da02-25ab-46f9-9070-9fdde0b3a75e {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.410419] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.410604] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.410933] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56e9360a-71e0-4675-ace6-6d1ae45a1d35 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.435560] env[62383]: INFO nova.scheduler.client.report [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Deleted allocations for instance 9604eadf-a027-46dd-989b-0d4b752f883a [ 855.487995] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.488280] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.488474] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Deleting the datastore file [datastore1] 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.488773] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-661e4b31-9833-4b24-a252-7d2c38ac5639 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.497456] env[62383]: DEBUG oslo_vmware.api [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for the task: (returnval){ [ 855.497456] env[62383]: value = "task-2451772" [ 855.497456] env[62383]: _type = "Task" [ 855.497456] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.507858] env[62383]: DEBUG oslo_vmware.api [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451772, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.609147] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451769, 'name': CloneVM_Task, 'duration_secs': 1.482728} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.609457] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Created linked-clone VM from snapshot [ 855.610237] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8e2a13-3fd8-4f1a-80c4-73f6989235ff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.620941] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Uploading image 2d173d61-3079-43d0-8982-7bc84898b336 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 855.641455] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.658011] env[62383]: DEBUG oslo_vmware.rw_handles [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 855.658011] env[62383]: value = "vm-496498" [ 855.658011] env[62383]: _type = "VirtualMachine" [ 855.658011] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 855.658011] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2ac9d4cf-93e7-47d7-904d-995f1dafddf3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.667266] env[62383]: DEBUG oslo_vmware.rw_handles [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lease: (returnval){ [ 855.667266] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5228a4e5-1433-e880-9efb-f9d307e8e616" [ 855.667266] env[62383]: _type = "HttpNfcLease" [ 855.667266] env[62383]: } obtained for exporting VM: (result){ [ 855.667266] env[62383]: value = "vm-496498" [ 855.667266] env[62383]: _type = "VirtualMachine" [ 855.667266] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 855.667575] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the lease: (returnval){ [ 855.667575] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5228a4e5-1433-e880-9efb-f9d307e8e616" [ 855.667575] env[62383]: _type = "HttpNfcLease" [ 855.667575] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 855.676377] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 855.676377] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5228a4e5-1433-e880-9efb-f9d307e8e616" [ 855.676377] env[62383]: _type = "HttpNfcLease" [ 855.676377] env[62383]: } is initializing. 
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 855.725457] env[62383]: DEBUG nova.compute.manager [req-eb061fe6-e8b7-458d-ad63-96dc61c94419 req-b7fd52a0-e5dc-4f6c-961f-36d1756fe137 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Received event network-vif-plugged-4f32c615-7f5d-420b-b198-2f48895523d6 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 855.725687] env[62383]: DEBUG oslo_concurrency.lockutils [req-eb061fe6-e8b7-458d-ad63-96dc61c94419 req-b7fd52a0-e5dc-4f6c-961f-36d1756fe137 service nova] Acquiring lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.725899] env[62383]: DEBUG oslo_concurrency.lockutils [req-eb061fe6-e8b7-458d-ad63-96dc61c94419 req-b7fd52a0-e5dc-4f6c-961f-36d1756fe137 service nova] Lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.726080] env[62383]: DEBUG oslo_concurrency.lockutils [req-eb061fe6-e8b7-458d-ad63-96dc61c94419 req-b7fd52a0-e5dc-4f6c-961f-36d1756fe137 service nova] Lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.726278] env[62383]: DEBUG nova.compute.manager [req-eb061fe6-e8b7-458d-ad63-96dc61c94419 req-b7fd52a0-e5dc-4f6c-961f-36d1756fe137 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] No waiting events found dispatching network-vif-plugged-4f32c615-7f5d-420b-b198-2f48895523d6 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 855.726405] env[62383]: WARNING nova.compute.manager [req-eb061fe6-e8b7-458d-ad63-96dc61c94419 req-b7fd52a0-e5dc-4f6c-961f-36d1756fe137 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Received unexpected event network-vif-plugged-4f32c615-7f5d-420b-b198-2f48895523d6 for instance with vm_state building and task_state spawning. [ 855.727077] env[62383]: INFO nova.compute.manager [-] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Took 1.31 seconds to deallocate network for instance. [ 855.797799] env[62383]: DEBUG nova.network.neutron [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Successfully updated port: 4f32c615-7f5d-420b-b198-2f48895523d6 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 855.803590] env[62383]: WARNING nova.compute.manager [None req-da7bdf4c-fc7e-41db-8ea4-5b392c3501c4 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Image not found during snapshot: nova.exception.ImageNotFound: Image 8e2c3dcb-aaa3-4de4-975d-80766931b380 could not be found. 
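Editor's note: the polling traced in the entries above (wait_for_task at oslo_vmware/api.py:397, wait_for_lease_ready at api.py:462, and the internal _poll_task/_poll_lease callbacks) follows oslo.vmware's standard pattern: invoke a vSphere SDK method through the API session, receive a Task or HttpNfcLease managed-object reference, and block until it reaches a terminal state. The sketch below is illustrative only, not the Nova code path itself; the vCenter host, credentials, and instance UUID are placeholders, and a reachable vCenter is assumed. Nova's nova.virt.vmwareapi.vmops and oslo_vmware.rw_handles wrap the same calls with retries, progress reporting, and lease URL handling.

# Illustrative sketch of the oslo.vmware session/polling pattern seen in the
# log above. Host, credentials, and the instance UUID are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org',            # placeholder vCenter host
    'administrator@vsphere.local',    # placeholder credentials
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# Look up a VM by instance UUID, as the SearchIndex.FindAllByUuid calls in the
# log do; the result is a list of VirtualMachine managed-object references.
vm_refs = session.invoke_api(
    session.vim, 'FindAllByUuid', session.vim.service_content.searchIndex,
    uuid='7740a70f-3c95-49aa-b3ec-0e0effd3efcc', vmSearch=True,
    instanceUuid=True)
vm_ref = vm_refs[0]

# Asynchronous operations (CreateSnapshot_Task, RemoveSnapshot_Task,
# CloneVM_Task, DeleteDatastoreFile_Task, ...) return a Task moref;
# wait_for_task polls its TaskInfo until it succeeds or raises.
task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                          name='snap', memory=False, quiesce=False)
task_info = session.wait_for_task(task)

# Exporting a VM hands back an HttpNfcLease instead of a Task; the session
# polls the lease state until it is "ready", after which the lease info
# exposes the VMDK URLs that rw_handles opens for reading.
lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
session.wait_for_lease_ready(lease)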
[ 855.945315] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e1f0d533-ba75-4f39-82e2-5325fc4e0941 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "9604eadf-a027-46dd-989b-0d4b752f883a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.934s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.010424] env[62383]: DEBUG oslo_vmware.api [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Task: {'id': task-2451772, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138274} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.010681] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.010872] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.011129] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.011350] env[62383]: INFO nova.compute.manager [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Took 1.14 seconds to destroy the instance on the hypervisor. [ 856.011603] env[62383]: DEBUG oslo.service.loopingcall [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.011784] env[62383]: DEBUG nova.compute.manager [-] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 856.011874] env[62383]: DEBUG nova.network.neutron [-] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 856.176981] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 856.176981] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5228a4e5-1433-e880-9efb-f9d307e8e616" [ 856.176981] env[62383]: _type = "HttpNfcLease" [ 856.176981] env[62383]: } is ready. 
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 856.176981] env[62383]: DEBUG oslo_vmware.rw_handles [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 856.176981] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5228a4e5-1433-e880-9efb-f9d307e8e616" [ 856.176981] env[62383]: _type = "HttpNfcLease" [ 856.176981] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 856.177881] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dadd611f-0b42-4d66-8ad6-b615de9bca09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.192028] env[62383]: DEBUG oslo_vmware.rw_handles [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604042-bd7d-b68a-c9aa-c7058164ec2b/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 856.192028] env[62383]: DEBUG oslo_vmware.rw_handles [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604042-bd7d-b68a-c9aa-c7058164ec2b/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 856.258106] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.302825] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "refresh_cache-4b3e6064-4462-45e7-b5dd-f2fc22422c3e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.303508] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "refresh_cache-4b3e6064-4462-45e7-b5dd-f2fc22422c3e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.303508] env[62383]: DEBUG nova.network.neutron [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 856.319506] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7fe3d6c5-4fa2-4d87-92f8-5cf68fd854b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
856.361992] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73ebc59-ee94-46dc-9ed4-40169934e0ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.370479] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97897cdb-fb2a-48f9-8770-3afc375eff14 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.410473] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7caf5b7a-2ce9-4ac1-8366-49d08e0617b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.420013] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3020c07d-8905-453c-95b8-1945d111423a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.435821] env[62383]: DEBUG nova.compute.provider_tree [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.826351] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "2eba2920-7912-475b-a198-890743aa5255" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.826677] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "2eba2920-7912-475b-a198-890743aa5255" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.826870] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "2eba2920-7912-475b-a198-890743aa5255-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.827068] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "2eba2920-7912-475b-a198-890743aa5255-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.827252] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 
tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "2eba2920-7912-475b-a198-890743aa5255-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.829843] env[62383]: INFO nova.compute.manager [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Terminating instance [ 856.872850] env[62383]: DEBUG nova.network.neutron [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 856.938457] env[62383]: DEBUG nova.scheduler.client.report [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 856.997539] env[62383]: DEBUG nova.network.neutron [-] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.058421] env[62383]: DEBUG nova.network.neutron [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Updating instance_info_cache with network_info: [{"id": "4f32c615-7f5d-420b-b198-2f48895523d6", "address": "fa:16:3e:80:6c:12", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f32c615-7f", "ovs_interfaceid": "4f32c615-7f5d-420b-b198-2f48895523d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.335041] env[62383]: 
DEBUG nova.compute.manager [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 857.335041] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 857.335427] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf36f26-17d7-4e2f-8c18-f9881b0b12ab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.348963] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 857.349506] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3e5780d-2118-4a09-a9d9-ab7f8904ca92 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.360345] env[62383]: DEBUG oslo_vmware.api [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 857.360345] env[62383]: value = "task-2451774" [ 857.360345] env[62383]: _type = "Task" [ 857.360345] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.370885] env[62383]: DEBUG oslo_vmware.api [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451774, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.450488] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.043s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.453312] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.521s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.453903] env[62383]: DEBUG nova.objects.instance [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lazy-loading 'resources' on Instance uuid ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.479580] env[62383]: INFO nova.scheduler.client.report [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleted allocations for instance da16da02-25ab-46f9-9070-9fdde0b3a75e [ 857.500401] env[62383]: INFO nova.compute.manager [-] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Took 1.49 seconds to deallocate network for instance. [ 857.550550] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.550942] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.551336] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.551633] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.551865] env[62383]: 
DEBUG oslo_concurrency.lockutils [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.554225] env[62383]: INFO nova.compute.manager [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Terminating instance [ 857.561825] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "refresh_cache-4b3e6064-4462-45e7-b5dd-f2fc22422c3e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.562161] env[62383]: DEBUG nova.compute.manager [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Instance network_info: |[{"id": "4f32c615-7f5d-420b-b198-2f48895523d6", "address": "fa:16:3e:80:6c:12", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f32c615-7f", "ovs_interfaceid": "4f32c615-7f5d-420b-b198-2f48895523d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 857.562721] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:6c:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f32c615-7f5d-420b-b198-2f48895523d6', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 857.570952] env[62383]: DEBUG oslo.service.loopingcall [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.572475] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 857.572773] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed2eb953-6ea5-4fe6-8d9f-02158861ceb2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.594786] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 857.594786] env[62383]: value = "task-2451775" [ 857.594786] env[62383]: _type = "Task" [ 857.594786] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.604087] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451775, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.756466] env[62383]: DEBUG nova.compute.manager [req-e51d8a7f-34c0-4355-a8e8-0510531d1da2 req-6cb6b9c7-ca09-46a4-b331-99870258c319 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Received event network-changed-4f32c615-7f5d-420b-b198-2f48895523d6 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 857.756466] env[62383]: DEBUG nova.compute.manager [req-e51d8a7f-34c0-4355-a8e8-0510531d1da2 req-6cb6b9c7-ca09-46a4-b331-99870258c319 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Refreshing instance network info cache due to event network-changed-4f32c615-7f5d-420b-b198-2f48895523d6. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 857.756466] env[62383]: DEBUG oslo_concurrency.lockutils [req-e51d8a7f-34c0-4355-a8e8-0510531d1da2 req-6cb6b9c7-ca09-46a4-b331-99870258c319 service nova] Acquiring lock "refresh_cache-4b3e6064-4462-45e7-b5dd-f2fc22422c3e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.756466] env[62383]: DEBUG oslo_concurrency.lockutils [req-e51d8a7f-34c0-4355-a8e8-0510531d1da2 req-6cb6b9c7-ca09-46a4-b331-99870258c319 service nova] Acquired lock "refresh_cache-4b3e6064-4462-45e7-b5dd-f2fc22422c3e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.756466] env[62383]: DEBUG nova.network.neutron [req-e51d8a7f-34c0-4355-a8e8-0510531d1da2 req-6cb6b9c7-ca09-46a4-b331-99870258c319 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Refreshing network info cache for port 4f32c615-7f5d-420b-b198-2f48895523d6 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 857.871511] env[62383]: DEBUG oslo_vmware.api [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451774, 'name': PowerOffVM_Task, 'duration_secs': 0.265044} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.871831] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 857.871965] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 857.872641] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d30c703a-a032-4fca-b864-6c8c70be7789 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.960414] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 857.960840] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 857.961462] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Deleting the datastore file [datastore1] 2eba2920-7912-475b-a198-890743aa5255 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 857.962470] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc386608-52d3-4f4f-a848-9e98a956abba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.970341] env[62383]: DEBUG oslo_vmware.api [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 857.970341] env[62383]: value = "task-2451777" [ 857.970341] env[62383]: _type = "Task" [ 857.970341] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.980946] env[62383]: DEBUG oslo_vmware.api [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451777, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.988360] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6f69e528-fd84-4a7e-a9f9-dadb78a2fa16 tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "da16da02-25ab-46f9-9070-9fdde0b3a75e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.913s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.012599] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.060127] env[62383]: DEBUG nova.compute.manager [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 858.060518] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 858.063662] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd2a7ba-5a4e-44e2-865b-53bfb6909aca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.072567] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 858.072832] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e50d70f2-7c01-4c37-8e40-3b78be9606e2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.080895] env[62383]: DEBUG oslo_vmware.api [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 858.080895] env[62383]: value = "task-2451778" [ 858.080895] env[62383]: _type = "Task" [ 858.080895] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.090014] env[62383]: DEBUG oslo_vmware.api [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451778, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.105978] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451775, 'name': CreateVM_Task, 'duration_secs': 0.466017} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.108587] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 858.109471] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.109649] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.109962] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 858.110232] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61c44a32-2360-41d9-b069-b6b647df3fb2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.116088] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 858.116088] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52048a78-e7af-4071-d199-8ff5c96603a4" [ 858.116088] env[62383]: _type = "Task" [ 858.116088] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.127122] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52048a78-e7af-4071-d199-8ff5c96603a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.343896] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2118aae1-4703-40fa-89ee-2b039d66fadc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.355665] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f07192-6059-4587-985d-3d915dde057e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.393022] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f6fb16-0dc1-45f7-a7c4-20cbde572284 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.402048] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b1be1a-db3e-4685-b71d-62bbc73dfe0b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.418257] env[62383]: DEBUG nova.compute.provider_tree [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.481755] env[62383]: DEBUG oslo_vmware.api [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451777, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176985} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.484101] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 858.484306] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 858.484485] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 858.484661] env[62383]: INFO nova.compute.manager [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 2eba2920-7912-475b-a198-890743aa5255] Took 1.15 seconds to destroy the instance on the hypervisor. 
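The entries above complete the VMware-side teardown of instance 2eba2920-7912-475b-a198-890743aa5255: PowerOffVM_Task, UnregisterVM, a FileManager delete of "[datastore1] 2eba2920-7912-475b-a198-890743aa5255", and only then network deallocation. Below is a minimal sketch of that ordering, assuming a session object whose power_off / unregister / delete_datastore_file / deallocate_network helpers block until the corresponding vCenter task finishes; these helper names are illustrative stand-ins for the driver calls named in the log, not the actual nova.virt.vmwareapi methods.

import logging
import time

LOG = logging.getLogger(__name__)


def destroy_instance(session, instance_uuid, datastore_path):
    # Order taken from the log: the guest is powered off and unregistered
    # before its datastore directory is removed; networking goes last.
    started = time.monotonic()

    session.power_off(instance_uuid)               # PowerOffVM_Task
    session.unregister(instance_uuid)              # UnregisterVM
    session.delete_datastore_file(datastore_path)  # DeleteDatastoreFile_Task

    LOG.info("Took %.2f seconds to destroy the instance on the hypervisor.",
             time.monotonic() - started)

    # deallocate_for_instance() in the log runs only after the hypervisor
    # cleanup succeeded, retried via a looping call in the compute manager.
    session.deallocate_network(instance_uuid)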
[ 858.484910] env[62383]: DEBUG oslo.service.loopingcall [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.485127] env[62383]: DEBUG nova.compute.manager [-] [instance: 2eba2920-7912-475b-a198-890743aa5255] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 858.485222] env[62383]: DEBUG nova.network.neutron [-] [instance: 2eba2920-7912-475b-a198-890743aa5255] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 858.519079] env[62383]: DEBUG nova.network.neutron [req-e51d8a7f-34c0-4355-a8e8-0510531d1da2 req-6cb6b9c7-ca09-46a4-b331-99870258c319 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Updated VIF entry in instance network info cache for port 4f32c615-7f5d-420b-b198-2f48895523d6. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 858.519449] env[62383]: DEBUG nova.network.neutron [req-e51d8a7f-34c0-4355-a8e8-0510531d1da2 req-6cb6b9c7-ca09-46a4-b331-99870258c319 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Updating instance_info_cache with network_info: [{"id": "4f32c615-7f5d-420b-b198-2f48895523d6", "address": "fa:16:3e:80:6c:12", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f32c615-7f", "ovs_interfaceid": "4f32c615-7f5d-420b-b198-2f48895523d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.592183] env[62383]: DEBUG oslo_vmware.api [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451778, 'name': PowerOffVM_Task, 'duration_secs': 0.229786} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.592476] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 858.592645] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 858.592902] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7f87758-b829-4a26-accb-d2371216bd2b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.630357] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52048a78-e7af-4071-d199-8ff5c96603a4, 'name': SearchDatastore_Task, 'duration_secs': 0.015388} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.631535] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.631762] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.631984] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.632252] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.632309] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.634937] env[62383]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-c0b6c846-5c61-495c-8a57-51a0f21a3b8f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.648025] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.648025] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 858.648495] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ac96e5a-71af-4558-8f2a-cca7a783d373 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.654162] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 858.654162] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b811fb-947b-6e8f-b31d-9ace8ebd3725" [ 858.654162] env[62383]: _type = "Task" [ 858.654162] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.664677] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b811fb-947b-6e8f-b31d-9ace8ebd3725, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.672047] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 858.672281] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 858.672465] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Deleting the datastore file [datastore2] a8d56b8e-fa11-4844-ab65-a2e5d24b1e07 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 858.672809] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fec8c269-4448-4bf9-9634-a3326ea45d65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.681779] env[62383]: DEBUG oslo_vmware.api [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for the task: (returnval){ [ 858.681779] env[62383]: value = "task-2451780" [ 858.681779] env[62383]: _type = "Task" [ 858.681779] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.692695] env[62383]: DEBUG oslo_vmware.api [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451780, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.879328] env[62383]: DEBUG nova.compute.manager [req-5c0da682-819c-4afe-9560-47f58ac81017 req-0d6af23f-55ab-455e-aab3-b4abc4022138 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Received event network-vif-deleted-9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 858.879583] env[62383]: INFO nova.compute.manager [req-5c0da682-819c-4afe-9560-47f58ac81017 req-0d6af23f-55ab-455e-aab3-b4abc4022138 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Neutron deleted interface 9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b; detaching it from the instance and deleting it from the info cache [ 858.879848] env[62383]: DEBUG nova.network.neutron [req-5c0da682-819c-4afe-9560-47f58ac81017 req-0d6af23f-55ab-455e-aab3-b4abc4022138 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.923803] env[62383]: DEBUG nova.scheduler.client.report [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 859.022619] env[62383]: DEBUG oslo_concurrency.lockutils [req-e51d8a7f-34c0-4355-a8e8-0510531d1da2 req-6cb6b9c7-ca09-46a4-b331-99870258c319 service nova] Releasing lock "refresh_cache-4b3e6064-4462-45e7-b5dd-f2fc22422c3e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.022950] env[62383]: DEBUG nova.compute.manager [req-e51d8a7f-34c0-4355-a8e8-0510531d1da2 req-6cb6b9c7-ca09-46a4-b331-99870258c319 service nova] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Received event network-vif-deleted-c0a30947-ef63-4154-9495-4bb92c6a0578 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 859.138474] env[62383]: DEBUG oslo_concurrency.lockutils [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.138792] env[62383]: DEBUG oslo_concurrency.lockutils [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.139029] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.139226] env[62383]: DEBUG oslo_concurrency.lockutils [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.139400] env[62383]: DEBUG oslo_concurrency.lockutils [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.142096] env[62383]: INFO nova.compute.manager [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Terminating instance [ 859.166104] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b811fb-947b-6e8f-b31d-9ace8ebd3725, 'name': SearchDatastore_Task, 'duration_secs': 0.020148} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.166950] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34b4e247-d119-4349-9443-faf80c4bb093 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.173134] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 859.173134] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524b30a1-29ff-2797-ac51-82964048a282" [ 859.173134] env[62383]: _type = "Task" [ 859.173134] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.181315] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524b30a1-29ff-2797-ac51-82964048a282, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.191462] env[62383]: DEBUG oslo_vmware.api [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Task: {'id': task-2451780, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205391} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.191725] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.191913] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 859.192122] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 859.192306] env[62383]: INFO nova.compute.manager [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Took 1.13 seconds to destroy the instance on the hypervisor. [ 859.192547] env[62383]: DEBUG oslo.service.loopingcall [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 859.192740] env[62383]: DEBUG nova.compute.manager [-] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 859.192837] env[62383]: DEBUG nova.network.neutron [-] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 859.314765] env[62383]: DEBUG nova.network.neutron [-] [instance: 2eba2920-7912-475b-a198-890743aa5255] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.382417] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-befd2ea1-60cb-496c-8c95-60dece6bbe8b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.394349] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4e05eb-16b1-4deb-9ad0-70c8c8cfba0e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.442620] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.447255] env[62383]: DEBUG nova.compute.manager 
[req-5c0da682-819c-4afe-9560-47f58ac81017 req-0d6af23f-55ab-455e-aab3-b4abc4022138 service nova] [instance: 2eba2920-7912-475b-a198-890743aa5255] Detach interface failed, port_id=9117f7a4-bb7b-4d52-98a7-ed5a863b7d9b, reason: Instance 2eba2920-7912-475b-a198-890743aa5255 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 859.447820] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.623s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.448083] env[62383]: DEBUG nova.objects.instance [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lazy-loading 'resources' on Instance uuid 80821717-f961-49c7-8b79-c152edfdfb94 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 859.473535] env[62383]: INFO nova.scheduler.client.report [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Deleted allocations for instance ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c [ 859.645935] env[62383]: DEBUG nova.compute.manager [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 859.646216] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 859.647180] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24798ab0-0d66-4a38-b3c7-28d9b3380dba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.655666] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.655914] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef073253-602b-4be7-91d3-b0605980b199 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.664335] env[62383]: DEBUG oslo_vmware.api [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 859.664335] env[62383]: value = "task-2451781" [ 859.664335] env[62383]: _type = "Task" [ 859.664335] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.675049] env[62383]: DEBUG oslo_vmware.api [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451781, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.685346] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524b30a1-29ff-2797-ac51-82964048a282, 'name': SearchDatastore_Task, 'duration_secs': 0.024739} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.685638] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.686065] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4b3e6064-4462-45e7-b5dd-f2fc22422c3e/4b3e6064-4462-45e7-b5dd-f2fc22422c3e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 859.686262] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d5a6bb3-00a5-44cc-b67e-247ddb7fd778 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.695084] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 859.695084] env[62383]: value = "task-2451782" [ 859.695084] env[62383]: _type = "Task" [ 859.695084] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.703994] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451782, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.817538] env[62383]: INFO nova.compute.manager [-] [instance: 2eba2920-7912-475b-a198-890743aa5255] Took 1.33 seconds to deallocate network for instance. 
[ 859.961612] env[62383]: DEBUG nova.network.neutron [-] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.983749] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f79ad811-5b75-428a-b555-61c558bf8d70 tempest-ServerRescueTestJSON-461177918 tempest-ServerRescueTestJSON-461177918-project-member] Lock "ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.606s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.177869] env[62383]: DEBUG oslo_vmware.api [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451781, 'name': PowerOffVM_Task, 'duration_secs': 0.260435} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.181035] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 860.181256] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 860.181760] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88b90833-d2ef-40e9-a39e-ed8184cd901b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.205048] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451782, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.266196] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 860.266467] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 860.266617] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleting the datastore file [datastore2] 1a740010-ddd0-4df6-8ae6-02f1ed50137f {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 860.267875] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a87a4b34-fb47-4aa9-b9b2-b6ed5158465a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.276333] env[62383]: DEBUG oslo_vmware.api [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for the task: (returnval){ [ 860.276333] env[62383]: value = "task-2451784" [ 860.276333] env[62383]: _type = "Task" [ 860.276333] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.289603] env[62383]: DEBUG oslo_vmware.api [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451784, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.326144] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.367836] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8242436a-e13b-498c-b266-1e84febae8f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.377021] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4158554-7d42-4506-ad1a-a15741195b94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.412015] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bfb5be-eaef-4cfe-aed7-32491b0cd4d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.420657] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d8e290-ff24-4959-9854-c2c2a4de2f44 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.435540] env[62383]: DEBUG nova.compute.provider_tree [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.465702] env[62383]: INFO nova.compute.manager [-] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Took 1.27 seconds to deallocate network for instance. [ 860.707631] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451782, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569176} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.708073] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4b3e6064-4462-45e7-b5dd-f2fc22422c3e/4b3e6064-4462-45e7-b5dd-f2fc22422c3e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 860.708431] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 860.708827] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-936288f5-62ed-4438-8f7e-beb172b3e687 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.718085] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 860.718085] env[62383]: value = "task-2451785" [ 860.718085] env[62383]: _type = "Task" [ 860.718085] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.730701] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451785, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.791599] env[62383]: DEBUG oslo_vmware.api [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Task: {'id': task-2451784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200995} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.791972] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 860.792269] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 860.793415] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 860.793680] env[62383]: INFO nova.compute.manager [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 860.794027] env[62383]: DEBUG oslo.service.loopingcall [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.794224] env[62383]: DEBUG nova.compute.manager [-] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 860.794326] env[62383]: DEBUG nova.network.neutron [-] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 860.938856] env[62383]: DEBUG nova.scheduler.client.report [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.972369] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.979016] env[62383]: DEBUG nova.compute.manager 
[req-6d6edca8-aef2-4e41-919a-a4f615514599 req-5e70e61c-b63f-49f7-b739-5d0576461ed7 service nova] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Received event network-vif-deleted-5205d6ef-091d-4460-bd6c-3b1c5873c3ea {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 861.232396] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451785, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087623} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.232713] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 861.233439] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d116ff73-cea5-451b-8008-681ad342f419 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.258731] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 4b3e6064-4462-45e7-b5dd-f2fc22422c3e/4b3e6064-4462-45e7-b5dd-f2fc22422c3e.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 861.262670] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f477216-ef09-4c60-8787-b993750ccdbe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.281759] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 861.281759] env[62383]: value = "task-2451786" [ 861.281759] env[62383]: _type = "Task" [ 861.281759] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.296013] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451786, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.444368] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.996s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.446648] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.632s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.448191] env[62383]: INFO nova.compute.claims [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.472928] env[62383]: INFO nova.scheduler.client.report [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Deleted allocations for instance 80821717-f961-49c7-8b79-c152edfdfb94 [ 861.706488] env[62383]: DEBUG nova.network.neutron [-] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.794403] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451786, 'name': ReconfigVM_Task, 'duration_secs': 0.493186} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.795090] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 4b3e6064-4462-45e7-b5dd-f2fc22422c3e/4b3e6064-4462-45e7-b5dd-f2fc22422c3e.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.796272] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f56e34b-24d6-4796-9cf3-0fb2a7c6c0cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.805026] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 861.805026] env[62383]: value = "task-2451787" [ 861.805026] env[62383]: _type = "Task" [ 861.805026] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.814200] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451787, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.981964] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56b7a31a-94b1-42bf-bc7f-a02b9eaac155 tempest-VolumesAdminNegativeTest-1772884078 tempest-VolumesAdminNegativeTest-1772884078-project-member] Lock "80821717-f961-49c7-8b79-c152edfdfb94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.820s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.209461] env[62383]: INFO nova.compute.manager [-] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Took 1.41 seconds to deallocate network for instance. [ 862.316964] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451787, 'name': Rename_Task, 'duration_secs': 0.208862} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.318031] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 862.318031] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c82da6b0-a06c-4dec-928f-82835fbf2cf0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.324855] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 862.324855] env[62383]: value = "task-2451788" [ 862.324855] env[62383]: _type = "Task" [ 862.324855] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.333673] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451788, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.717479] env[62383]: DEBUG oslo_concurrency.lockutils [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.807935] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e91e1c-d222-489f-900e-5a6e958fc31d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.818656] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b21b17d-691a-4021-bc08-11c9163599ac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.857117] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bfac21-c066-4fa4-ad3e-4b7dcaed4725 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.864918] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451788, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.868195] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e34b0e4-6538-4100-a7f3-d4b8ef89e93a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.881730] env[62383]: DEBUG nova.compute.provider_tree [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.008840] env[62383]: DEBUG nova.compute.manager [req-f61cef49-aa8c-496c-924d-f571365f2752 req-5113080a-aa0d-4937-8a63-bcea6353f0ba service nova] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Received event network-vif-deleted-b1d30299-f4ce-40b4-9046-fd1d10565fd3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 863.364610] env[62383]: DEBUG oslo_vmware.api [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451788, 'name': PowerOnVM_Task, 'duration_secs': 0.661062} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.365018] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 863.366164] env[62383]: INFO nova.compute.manager [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Took 8.12 seconds to spawn the instance on the hypervisor. [ 863.366490] env[62383]: DEBUG nova.compute.manager [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 863.367473] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f02b09-66c8-4070-966e-b6ce757e0ac2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.384897] env[62383]: DEBUG nova.scheduler.client.report [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.889946] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.890487] env[62383]: DEBUG nova.compute.manager [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 863.892822] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.269s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.893085] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.895044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.409s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.895244] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.896938] env[62383]: DEBUG oslo_concurrency.lockutils [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.240s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.897162] env[62383]: DEBUG nova.objects.instance [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lazy-loading 'resources' on Instance uuid a04a6a53-cca8-4e15-b840-cb1394e5b188 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.914359] env[62383]: INFO nova.compute.manager [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Took 26.20 seconds to build instance. 
[ 864.027460] env[62383]: INFO nova.scheduler.client.report [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted allocations for instance 93234e99-268f-491e-96bd-a77f4c9f164b [ 864.029907] env[62383]: INFO nova.scheduler.client.report [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleted allocations for instance 8a165d96-f503-4bc5-bff4-e6a85201e137 [ 864.422939] env[62383]: DEBUG nova.compute.utils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 864.426137] env[62383]: DEBUG oslo_concurrency.lockutils [None req-25d12d76-de38-4ada-8829-62f873d37743 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.720s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.427731] env[62383]: DEBUG nova.compute.manager [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 864.427731] env[62383]: DEBUG nova.network.neutron [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.531849] env[62383]: DEBUG nova.policy [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bda8cb7b1005458ca6fc7e5ca6882e6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '603ba5501c904542b6ff0935f620e6da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 864.544211] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e3ca9cc3-a97b-4d9b-9f6a-3442543c6252 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "93234e99-268f-491e-96bd-a77f4c9f164b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.478s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.544888] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e0452cbe-a6fe-40b0-97d5-8995bf31c7f2 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "8a165d96-f503-4bc5-bff4-e6a85201e137" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.923s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.798157] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd7ac1d-3b10-4105-b484-22418965a725 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.805893] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6b36fd-714f-4a94-8074-58f16f917c59 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.839821] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff2654e-6480-4a91-b5f0-d173fd088197 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.846919] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fccfd7c-51c9-469a-a729-30d2ce9109ab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.861298] env[62383]: DEBUG nova.compute.provider_tree [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.931547] env[62383]: DEBUG nova.compute.manager [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 865.028358] env[62383]: DEBUG oslo_vmware.rw_handles [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604042-bd7d-b68a-c9aa-c7058164ec2b/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 865.029300] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a493c56f-0cd2-4920-a8aa-33556945fcb2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.037100] env[62383]: DEBUG oslo_vmware.rw_handles [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604042-bd7d-b68a-c9aa-c7058164ec2b/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 865.037100] env[62383]: ERROR oslo_vmware.rw_handles [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604042-bd7d-b68a-c9aa-c7058164ec2b/disk-0.vmdk due to incomplete transfer. 
[ 865.037100] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e47a0f9b-081a-4433-8866-f006068bf5ce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.043894] env[62383]: DEBUG oslo_vmware.rw_handles [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52604042-bd7d-b68a-c9aa-c7058164ec2b/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 865.044241] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Uploaded image 2d173d61-3079-43d0-8982-7bc84898b336 to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 865.046863] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 865.047546] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4c1fe95c-2274-4154-ba43-51b0407973c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.054666] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 865.054666] env[62383]: value = "task-2451789" [ 865.054666] env[62383]: _type = "Task" [ 865.054666] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.066308] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451789, 'name': Destroy_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.365101] env[62383]: DEBUG nova.scheduler.client.report [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 865.370643] env[62383]: DEBUG nova.network.neutron [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Successfully created port: 913ca293-96ad-478e-96f7-b0b1697a3b0d {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.568399] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451789, 'name': Destroy_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.696719] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.697070] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.697302] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.697494] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.697723] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 
tempest-ServersTestJSON-715409334-project-member] Lock "4b3e6064-4462-45e7-b5dd-f2fc22422c3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.699858] env[62383]: INFO nova.compute.manager [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Terminating instance [ 865.874947] env[62383]: DEBUG oslo_concurrency.lockutils [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.878269] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.237s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.879990] env[62383]: INFO nova.compute.claims [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.911585] env[62383]: INFO nova.scheduler.client.report [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Deleted allocations for instance a04a6a53-cca8-4e15-b840-cb1394e5b188 [ 865.941651] env[62383]: DEBUG nova.compute.manager [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 865.975141] env[62383]: DEBUG nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 865.975141] env[62383]: DEBUG nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.975141] env[62383]: DEBUG nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 865.975325] env[62383]: DEBUG nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.975325] env[62383]: DEBUG nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 865.975325] env[62383]: DEBUG nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 865.975325] env[62383]: DEBUG nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 865.978874] env[62383]: DEBUG nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 865.979262] env[62383]: DEBUG 
nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 865.979564] env[62383]: DEBUG nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 865.981583] env[62383]: DEBUG nova.virt.hardware [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 865.982808] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a84b0ce-4481-45b1-93ad-b197a4ff1e0e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.991813] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3122623e-81d0-4619-88f2-65fa807d89a7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.067746] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451789, 'name': Destroy_Task, 'duration_secs': 0.690888} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.068039] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Destroyed the VM [ 866.068286] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 866.068551] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-12dcb875-6c98-40e2-9da4-f6171e207a82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.076468] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 866.076468] env[62383]: value = "task-2451790" [ 866.076468] env[62383]: _type = "Task" [ 866.076468] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.089925] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451790, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.206207] env[62383]: DEBUG nova.compute.manager [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 866.206207] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 866.206207] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d6bbd0-a937-4c4a-829f-53ea5e48bdc7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.214473] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 866.214740] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c64a8a35-b061-4e3e-bf26-f92a33b888a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.220934] env[62383]: DEBUG oslo_vmware.api [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 866.220934] env[62383]: value = "task-2451791" [ 866.220934] env[62383]: _type = "Task" [ 866.220934] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.230856] env[62383]: DEBUG oslo_vmware.api [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451791, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.270952] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "eedc7859-3882-4837-9419-f9edce5f12fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.271228] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.421371] env[62383]: DEBUG oslo_concurrency.lockutils [None req-581f9506-fae6-41e3-8fe0-12e0b7707612 tempest-ServersNegativeTestMultiTenantJSON-1060035224 tempest-ServersNegativeTestMultiTenantJSON-1060035224-project-member] Lock "a04a6a53-cca8-4e15-b840-cb1394e5b188" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.362s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.586619] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451790, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.730827] env[62383]: DEBUG oslo_vmware.api [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451791, 'name': PowerOffVM_Task, 'duration_secs': 0.226062} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.731158] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 866.731332] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 866.731610] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71c80552-5256-4175-895c-7aad5a803983 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.773913] env[62383]: DEBUG nova.compute.manager [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 866.798640] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 866.798898] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 866.799172] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleting the datastore file [datastore2] 4b3e6064-4462-45e7-b5dd-f2fc22422c3e {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.799456] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d4321ec-8788-488b-9cf4-23ec6fc78e0a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.807480] env[62383]: DEBUG oslo_vmware.api [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 866.807480] env[62383]: value = "task-2451793" [ 866.807480] env[62383]: _type = "Task" [ 866.807480] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.818148] env[62383]: DEBUG oslo_vmware.api [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451793, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.873329] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "506afe7c-f19b-4417-b097-485c0244a019" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.873603] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "506afe7c-f19b-4417-b097-485c0244a019" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.094715] env[62383]: DEBUG oslo_vmware.api [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451790, 'name': RemoveSnapshot_Task, 'duration_secs': 0.96634} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.095021] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 867.095304] env[62383]: INFO nova.compute.manager [None req-aa345b2e-90f7-441e-924e-ae96bcdca2ac tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Took 15.62 seconds to snapshot the instance on the hypervisor. [ 867.111844] env[62383]: DEBUG nova.compute.manager [req-00a6951f-0075-403f-b995-2a0e34b34c06 req-c03253dc-c714-4af1-9627-341b79ce951c service nova] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Received event network-vif-plugged-913ca293-96ad-478e-96f7-b0b1697a3b0d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 867.112156] env[62383]: DEBUG oslo_concurrency.lockutils [req-00a6951f-0075-403f-b995-2a0e34b34c06 req-c03253dc-c714-4af1-9627-341b79ce951c service nova] Acquiring lock "81921762-ac51-42d2-83dc-d5b6e904fbb7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.112428] env[62383]: DEBUG oslo_concurrency.lockutils [req-00a6951f-0075-403f-b995-2a0e34b34c06 req-c03253dc-c714-4af1-9627-341b79ce951c service nova] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.112657] env[62383]: DEBUG oslo_concurrency.lockutils [req-00a6951f-0075-403f-b995-2a0e34b34c06 req-c03253dc-c714-4af1-9627-341b79ce951c service nova] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.112889] env[62383]: DEBUG nova.compute.manager [req-00a6951f-0075-403f-b995-2a0e34b34c06 req-c03253dc-c714-4af1-9627-341b79ce951c service nova] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] No waiting events found dispatching network-vif-plugged-913ca293-96ad-478e-96f7-b0b1697a3b0d {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 867.113223] env[62383]: WARNING nova.compute.manager [req-00a6951f-0075-403f-b995-2a0e34b34c06 req-c03253dc-c714-4af1-9627-341b79ce951c service nova] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Received unexpected event network-vif-plugged-913ca293-96ad-478e-96f7-b0b1697a3b0d for instance with vm_state building and task_state spawning. 
[ 867.205034] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "1e367665-1d4b-4686-ac79-c946423c1762" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.205274] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.299093] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9624de2a-04df-4930-b368-173b0ab38468 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.303933] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.312223] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5651639-f5ed-41aa-b2e0-e18628c85e6f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.320381] env[62383]: DEBUG oslo_vmware.api [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451793, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162165} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.349331] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 867.349628] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 867.349877] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 867.350156] env[62383]: INFO nova.compute.manager [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 867.350438] env[62383]: DEBUG oslo.service.loopingcall [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.351413] env[62383]: DEBUG nova.compute.manager [-] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 867.352032] env[62383]: DEBUG nova.network.neutron [-] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 867.355378] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e171a5aa-ab0b-4425-94f3-1537fa681191 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.365802] env[62383]: DEBUG nova.network.neutron [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Successfully updated port: 913ca293-96ad-478e-96f7-b0b1697a3b0d {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.367879] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574dc6db-fc35-457e-a35b-a3e8bccfffe7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.382839] env[62383]: DEBUG nova.compute.manager [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 867.387808] env[62383]: DEBUG nova.compute.provider_tree [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.708267] env[62383]: DEBUG nova.compute.utils [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 867.740951] env[62383]: DEBUG nova.compute.manager [req-238ad8c4-77a5-4972-a792-36484e85bd34 req-fe6c30cc-6b83-4149-ade3-3f7d26b61529 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Received event network-vif-deleted-4f32c615-7f5d-420b-b198-2f48895523d6 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 867.740951] env[62383]: INFO nova.compute.manager [req-238ad8c4-77a5-4972-a792-36484e85bd34 req-fe6c30cc-6b83-4149-ade3-3f7d26b61529 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Neutron deleted interface 4f32c615-7f5d-420b-b198-2f48895523d6; detaching it from the instance and deleting it from the info cache [ 867.740951] env[62383]: DEBUG nova.network.neutron [req-238ad8c4-77a5-4972-a792-36484e85bd34 req-fe6c30cc-6b83-4149-ade3-3f7d26b61529 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.873677] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.873851] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.874018] env[62383]: DEBUG nova.network.neutron [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.896042] env[62383]: DEBUG nova.scheduler.client.report [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 867.919516] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.202038] env[62383]: DEBUG nova.network.neutron [-] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.211384] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.243121] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc52909d-54df-4aec-ac84-76c25f3ae26d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.253643] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad15dc76-80ee-4099-a02c-ccc70bda16bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.289582] env[62383]: DEBUG nova.compute.manager [req-238ad8c4-77a5-4972-a792-36484e85bd34 req-fe6c30cc-6b83-4149-ade3-3f7d26b61529 service nova] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Detach interface failed, port_id=4f32c615-7f5d-420b-b198-2f48895523d6, reason: Instance 4b3e6064-4462-45e7-b5dd-f2fc22422c3e could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 868.401698] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.402448] env[62383]: DEBUG nova.compute.manager [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 868.406620] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.149s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.406788] env[62383]: DEBUG nova.objects.instance [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lazy-loading 'resources' on Instance uuid 5ef22e87-f73c-47ba-b925-2bd2effe74eb {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 868.416970] env[62383]: DEBUG nova.network.neutron [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.668712] env[62383]: DEBUG nova.network.neutron [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance_info_cache with network_info: [{"id": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "address": "fa:16:3e:ee:69:75", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap913ca293-96", "ovs_interfaceid": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.706129] env[62383]: INFO nova.compute.manager [-] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Took 1.35 seconds to deallocate network for instance. [ 868.910458] env[62383]: DEBUG nova.compute.utils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 868.911898] env[62383]: DEBUG nova.compute.manager [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 868.912129] env[62383]: DEBUG nova.network.neutron [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 868.983202] env[62383]: DEBUG nova.policy [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '348e3fa52adf4a2aa2efd7070c0eda1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11325c08987e4a3ba4b8a9979a2204db', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 869.156553] env[62383]: DEBUG nova.compute.manager [req-b16ecb5d-4d9b-428f-95e6-20b075b6c93f req-48bb9d88-4589-4bed-9488-e97dd6590f48 service nova] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Received event network-changed-913ca293-96ad-478e-96f7-b0b1697a3b0d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 869.156791] env[62383]: DEBUG nova.compute.manager [req-b16ecb5d-4d9b-428f-95e6-20b075b6c93f req-48bb9d88-4589-4bed-9488-e97dd6590f48 service nova] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Refreshing instance network info cache due to event network-changed-913ca293-96ad-478e-96f7-b0b1697a3b0d. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 869.156986] env[62383]: DEBUG oslo_concurrency.lockutils [req-b16ecb5d-4d9b-428f-95e6-20b075b6c93f req-48bb9d88-4589-4bed-9488-e97dd6590f48 service nova] Acquiring lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.176433] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.176794] env[62383]: DEBUG nova.compute.manager [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Instance network_info: |[{"id": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "address": "fa:16:3e:ee:69:75", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap913ca293-96", "ovs_interfaceid": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 869.177455] env[62383]: DEBUG oslo_concurrency.lockutils [req-b16ecb5d-4d9b-428f-95e6-20b075b6c93f req-48bb9d88-4589-4bed-9488-e97dd6590f48 service nova] Acquired lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.177649] env[62383]: DEBUG nova.network.neutron [req-b16ecb5d-4d9b-428f-95e6-20b075b6c93f req-48bb9d88-4589-4bed-9488-e97dd6590f48 service nova] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Refreshing network info cache for port 913ca293-96ad-478e-96f7-b0b1697a3b0d {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.179091] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:69:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb971244-43ba-41b4-a6a2-a4558548012c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'913ca293-96ad-478e-96f7-b0b1697a3b0d', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.187315] env[62383]: DEBUG oslo.service.loopingcall [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.190788] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.192188] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e21f075a-0ce6-4aa3-ae9a-e67c79b6b522 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.216077] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.223018] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.223018] env[62383]: value = "task-2451794" [ 869.223018] env[62383]: _type = "Task" [ 869.223018] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.232099] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451794, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.297845] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "1e367665-1d4b-4686-ac79-c946423c1762" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.298409] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.298520] env[62383]: INFO nova.compute.manager [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Attaching volume 34440909-fbf8-4f00-9d5a-dd07de7bcefa to /dev/sdb [ 869.343991] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d143b512-550e-4b8f-b55c-d6209b0e849f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.350526] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c437a101-f429-4522-b287-32b4c40ba45e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.358040] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c494175-c1ea-4055-9ef5-e860a57aa054 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.364297] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fe8bf2-6a8a-4a3a-8f62-717b7648288d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.394925] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9484503b-4ad0-49a5-8a81-32baae95a5da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.406484] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cfd804-73a5-47fe-9388-0447b386f001 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.414543] env[62383]: DEBUG nova.virt.block_device [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updating existing volume attachment record: 46fbcf56-e643-4b98-802c-09693d37ace2 {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 869.416785] env[62383]: DEBUG nova.compute.manager [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 
tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 869.427794] env[62383]: DEBUG nova.compute.provider_tree [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.739030] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451794, 'name': CreateVM_Task, 'duration_secs': 0.399404} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.739030] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 869.739030] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.739030] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.739030] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 869.739030] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ececf0c-754a-4b96-954a-8c7516f2893d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.746226] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 869.746226] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522b7f28-23a3-ab54-f39e-0638580979b8" [ 869.746226] env[62383]: _type = "Task" [ 869.746226] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.759326] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522b7f28-23a3-ab54-f39e-0638580979b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.786537] env[62383]: DEBUG nova.network.neutron [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Successfully created port: 93551a4b-143c-4a41-b504-5294fbc26f56 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 869.937756] env[62383]: DEBUG nova.scheduler.client.report [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 870.269556] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522b7f28-23a3-ab54-f39e-0638580979b8, 'name': SearchDatastore_Task, 'duration_secs': 0.01511} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.270091] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.270471] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 870.271116] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.271401] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.271734] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 
tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.272634] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8432f3d-fc49-4ed4-860e-affe3c8febfc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.283671] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.283869] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 870.284641] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ad91ed4-806e-40fa-86dd-86645bae7bee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.290145] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 870.290145] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52822031-4c59-f849-3183-488e6534eb31" [ 870.290145] env[62383]: _type = "Task" [ 870.290145] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.299285] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52822031-4c59-f849-3183-488e6534eb31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.368670] env[62383]: DEBUG nova.network.neutron [req-b16ecb5d-4d9b-428f-95e6-20b075b6c93f req-48bb9d88-4589-4bed-9488-e97dd6590f48 service nova] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updated VIF entry in instance network info cache for port 913ca293-96ad-478e-96f7-b0b1697a3b0d. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.368860] env[62383]: DEBUG nova.network.neutron [req-b16ecb5d-4d9b-428f-95e6-20b075b6c93f req-48bb9d88-4589-4bed-9488-e97dd6590f48 service nova] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance_info_cache with network_info: [{"id": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "address": "fa:16:3e:ee:69:75", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap913ca293-96", "ovs_interfaceid": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.446067] env[62383]: DEBUG nova.compute.manager [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 870.449062] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.042s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.451242] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.439s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.451471] env[62383]: DEBUG nova.objects.instance [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lazy-loading 'resources' on Instance uuid 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 870.484733] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 870.485117] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.485649] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 870.485649] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.485807] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 870.486032] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 870.486340] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 870.486577] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 870.486821] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 870.487072] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 870.487362] env[62383]: DEBUG nova.virt.hardware [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 870.489082] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcb0918-31d8-44f8-bd46-04d68bdd3dca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.494310] env[62383]: INFO nova.scheduler.client.report [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Deleted allocations for instance 5ef22e87-f73c-47ba-b925-2bd2effe74eb [ 870.511697] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c876777f-7c08-45ba-a076-ff8798a6559e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.803524] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52822031-4c59-f849-3183-488e6534eb31, 'name': SearchDatastore_Task, 'duration_secs': 0.010819} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.805180] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8d612e6-9bcc-44a5-82f1-d4a7344013fb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.809830] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 870.809830] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52047332-88e9-af40-a899-d9cb293953c1" [ 870.809830] env[62383]: _type = "Task" [ 870.809830] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.818513] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52047332-88e9-af40-a899-d9cb293953c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.873583] env[62383]: DEBUG oslo_concurrency.lockutils [req-b16ecb5d-4d9b-428f-95e6-20b075b6c93f req-48bb9d88-4589-4bed-9488-e97dd6590f48 service nova] Releasing lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.015236] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d2323f32-e2e4-4959-a8a2-893dab73129c tempest-SecurityGroupsTestJSON-1885631499 tempest-SecurityGroupsTestJSON-1885631499-project-member] Lock "5ef22e87-f73c-47ba-b925-2bd2effe74eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.482s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.323841] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52047332-88e9-af40-a899-d9cb293953c1, 'name': SearchDatastore_Task, 'duration_secs': 0.021002} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.324517] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.324830] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 81921762-ac51-42d2-83dc-d5b6e904fbb7/81921762-ac51-42d2-83dc-d5b6e904fbb7.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.327555] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ea6b9dd-c896-47b6-96d6-0f648e2e9e94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.334475] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 871.334475] env[62383]: value = "task-2451798" [ 871.334475] env[62383]: _type = "Task" [ 871.334475] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.342416] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451798, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.372035] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e208e7-23c3-4e62-85cd-d1bcaadb4e5c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.380263] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4ba7c6-ea4f-42eb-b63d-8edc469cfe25 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.421877] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4aacc9-5e85-475c-a80d-f28344b2ea65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.431021] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8efe23f7-4891-4e91-8173-7e8a48d35d76 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.443282] env[62383]: DEBUG nova.compute.provider_tree [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.676614] env[62383]: DEBUG nova.network.neutron [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Successfully updated port: 93551a4b-143c-4a41-b504-5294fbc26f56 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 871.846204] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451798, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.922643] env[62383]: DEBUG nova.compute.manager [req-3f9dbb80-0fac-40e4-bb55-bd147e109962 req-2711662e-2622-4fe8-bec6-98cb3325ffb7 service nova] [instance: 40719661-5955-48ec-b289-b37896dd04df] Received event network-vif-plugged-93551a4b-143c-4a41-b504-5294fbc26f56 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 871.922643] env[62383]: DEBUG oslo_concurrency.lockutils [req-3f9dbb80-0fac-40e4-bb55-bd147e109962 req-2711662e-2622-4fe8-bec6-98cb3325ffb7 service nova] Acquiring lock "40719661-5955-48ec-b289-b37896dd04df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.922985] env[62383]: DEBUG oslo_concurrency.lockutils [req-3f9dbb80-0fac-40e4-bb55-bd147e109962 req-2711662e-2622-4fe8-bec6-98cb3325ffb7 service nova] Lock "40719661-5955-48ec-b289-b37896dd04df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.922985] env[62383]: DEBUG oslo_concurrency.lockutils [req-3f9dbb80-0fac-40e4-bb55-bd147e109962 req-2711662e-2622-4fe8-bec6-98cb3325ffb7 service nova] Lock "40719661-5955-48ec-b289-b37896dd04df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.923182] env[62383]: DEBUG nova.compute.manager [req-3f9dbb80-0fac-40e4-bb55-bd147e109962 req-2711662e-2622-4fe8-bec6-98cb3325ffb7 service nova] [instance: 40719661-5955-48ec-b289-b37896dd04df] No waiting events found dispatching network-vif-plugged-93551a4b-143c-4a41-b504-5294fbc26f56 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 871.923326] env[62383]: WARNING nova.compute.manager [req-3f9dbb80-0fac-40e4-bb55-bd147e109962 req-2711662e-2622-4fe8-bec6-98cb3325ffb7 service nova] [instance: 40719661-5955-48ec-b289-b37896dd04df] Received unexpected event network-vif-plugged-93551a4b-143c-4a41-b504-5294fbc26f56 for instance with vm_state building and task_state spawning. 
[ 871.953301] env[62383]: DEBUG nova.scheduler.client.report [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.103727] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "4d58d2e6-171d-4346-b281-bcbd22286623" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.103839] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "4d58d2e6-171d-4346-b281-bcbd22286623" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.133664] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.133932] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.178321] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquiring lock "refresh_cache-40719661-5955-48ec-b289-b37896dd04df" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.178477] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquired lock "refresh_cache-40719661-5955-48ec-b289-b37896dd04df" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.178635] env[62383]: DEBUG nova.network.neutron [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 
tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 872.349918] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451798, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.464313] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.464313] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.138s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.464313] env[62383]: DEBUG nova.objects.instance [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lazy-loading 'resources' on Instance uuid 2eba2920-7912-475b-a198-890743aa5255 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.496829] env[62383]: INFO nova.scheduler.client.report [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Deleted allocations for instance 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93 [ 872.548499] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.548499] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.611514] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 872.638923] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 872.721680] env[62383]: DEBUG nova.network.neutron [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 872.849679] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451798, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.875089] env[62383]: DEBUG nova.network.neutron [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Updating instance_info_cache with network_info: [{"id": "93551a4b-143c-4a41-b504-5294fbc26f56", "address": "fa:16:3e:e5:66:a9", "network": {"id": "32421969-dc86-47ae-9d44-b96488dd9fdb", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1541208270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11325c08987e4a3ba4b8a9979a2204db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93551a4b-14", "ovs_interfaceid": "93551a4b-143c-4a41-b504-5294fbc26f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.011415] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4b860ec0-e887-48a1-ba45-d6318fb276d1 tempest-AttachInterfacesUnderV243Test-1509461933 tempest-AttachInterfacesUnderV243Test-1509461933-project-member] Lock "57eaad0a-ca55-4bff-bbd0-6155ecf1cb93" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.653s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.050591] env[62383]: DEBUG nova.compute.manager [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 873.133244] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.159156] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.347448] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451798, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.562556} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.348667] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 81921762-ac51-42d2-83dc-d5b6e904fbb7/81921762-ac51-42d2-83dc-d5b6e904fbb7.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 873.349322] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.350197] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3bcab6-4085-49ff-a346-33dacd32fb82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.353129] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f90a720-44f3-44ce-9d25-df01e303475e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.359731] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c57769-654a-4a1a-bc6c-0232f5dd6e2d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.367202] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 873.367202] env[62383]: value = "task-2451800" [ 873.367202] env[62383]: _type = "Task" [ 873.367202] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.396056] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Releasing lock "refresh_cache-40719661-5955-48ec-b289-b37896dd04df" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.396056] env[62383]: DEBUG nova.compute.manager [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Instance network_info: |[{"id": "93551a4b-143c-4a41-b504-5294fbc26f56", "address": "fa:16:3e:e5:66:a9", "network": {"id": "32421969-dc86-47ae-9d44-b96488dd9fdb", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1541208270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11325c08987e4a3ba4b8a9979a2204db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93551a4b-14", "ovs_interfaceid": "93551a4b-143c-4a41-b504-5294fbc26f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 873.396907] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:66:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f77ff7a1-209c-4f3f-b2a0-fd817741e739', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93551a4b-143c-4a41-b504-5294fbc26f56', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.405043] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Creating folder: Project (11325c08987e4a3ba4b8a9979a2204db). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 873.405773] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472e1267-87c6-40d3-af24-e2a7fa244d59 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.411296] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b971dd6-36da-4885-8b2f-f3e82f263a53 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.413087] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451800, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.418452] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e492feb1-397f-4825-a1b9-e189af247e98 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.423971] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Created folder: Project (11325c08987e4a3ba4b8a9979a2204db) in parent group-v496304. [ 873.424180] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Creating folder: Instances. Parent ref: group-v496503. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 873.424774] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48eaf108-8421-45b0-b54d-f4b9b0b0db5c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.436112] env[62383]: DEBUG nova.compute.provider_tree [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.444741] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Created folder: Instances in parent group-v496503. [ 873.445023] env[62383]: DEBUG oslo.service.loopingcall [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 873.445243] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40719661-5955-48ec-b289-b37896dd04df] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 873.445418] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55cb8006-b8bf-4c24-8842-5c2dacddd80e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.466128] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.466128] env[62383]: value = "task-2451803" [ 873.466128] env[62383]: _type = "Task" [ 873.466128] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.475870] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451803, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.580745] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.879196] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451800, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063003} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.879498] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.880612] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875072ba-040c-41e7-9dad-2d98975251c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.909583] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 81921762-ac51-42d2-83dc-d5b6e904fbb7/81921762-ac51-42d2-83dc-d5b6e904fbb7.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.910020] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8be157cd-26c9-4604-ba19-86383eff7e20 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.939692] env[62383]: DEBUG nova.scheduler.client.report [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 873.946415] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 873.946415] env[62383]: value = "task-2451804" [ 873.946415] env[62383]: _type = "Task" [ 873.946415] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.956526] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451804, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.978644] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451803, 'name': CreateVM_Task, 'duration_secs': 0.337405} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.978644] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40719661-5955-48ec-b289-b37896dd04df] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 873.978644] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.978840] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.979202] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 873.979501] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2669c2b6-fa86-4acd-8d4d-e914a3d7259c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.984259] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for the task: (returnval){ [ 873.984259] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520e8df7-ab04-8e1d-3e6a-eaa16542174f" [ 873.984259] env[62383]: _type = "Task" [ 873.984259] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.992953] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520e8df7-ab04-8e1d-3e6a-eaa16542174f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.995414] env[62383]: DEBUG nova.compute.manager [req-a6c0c3a7-cfc8-472c-81cb-b5524e028e2c req-55bbaf70-2609-42a0-a0b7-c134cad103c9 service nova] [instance: 40719661-5955-48ec-b289-b37896dd04df] Received event network-changed-93551a4b-143c-4a41-b504-5294fbc26f56 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 873.995652] env[62383]: DEBUG nova.compute.manager [req-a6c0c3a7-cfc8-472c-81cb-b5524e028e2c req-55bbaf70-2609-42a0-a0b7-c134cad103c9 service nova] [instance: 40719661-5955-48ec-b289-b37896dd04df] Refreshing instance network info cache due to event network-changed-93551a4b-143c-4a41-b504-5294fbc26f56. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 873.995959] env[62383]: DEBUG oslo_concurrency.lockutils [req-a6c0c3a7-cfc8-472c-81cb-b5524e028e2c req-55bbaf70-2609-42a0-a0b7-c134cad103c9 service nova] Acquiring lock "refresh_cache-40719661-5955-48ec-b289-b37896dd04df" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.996127] env[62383]: DEBUG oslo_concurrency.lockutils [req-a6c0c3a7-cfc8-472c-81cb-b5524e028e2c req-55bbaf70-2609-42a0-a0b7-c134cad103c9 service nova] Acquired lock "refresh_cache-40719661-5955-48ec-b289-b37896dd04df" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.996338] env[62383]: DEBUG nova.network.neutron [req-a6c0c3a7-cfc8-472c-81cb-b5524e028e2c req-55bbaf70-2609-42a0-a0b7-c134cad103c9 service nova] [instance: 40719661-5955-48ec-b289-b37896dd04df] Refreshing network info cache for port 93551a4b-143c-4a41-b504-5294fbc26f56 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 874.448218] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.984s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.450864] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.479s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.451171] env[62383]: DEBUG nova.objects.instance [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lazy-loading 'resources' on Instance uuid a8d56b8e-fa11-4844-ab65-a2e5d24b1e07 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 874.471450] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451804, 'name': ReconfigVM_Task, 'duration_secs': 0.264497} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.473036] env[62383]: INFO nova.scheduler.client.report [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Deleted allocations for instance 2eba2920-7912-475b-a198-890743aa5255 [ 874.475062] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 81921762-ac51-42d2-83dc-d5b6e904fbb7/81921762-ac51-42d2-83dc-d5b6e904fbb7.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.478236] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8944704-2be4-47dd-babc-045f67c9ad13 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.480802] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Volume attach. Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 874.481028] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496502', 'volume_id': '34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'name': 'volume-34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1e367665-1d4b-4686-ac79-c946423c1762', 'attached_at': '', 'detached_at': '', 'volume_id': '34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'serial': '34440909-fbf8-4f00-9d5a-dd07de7bcefa'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 874.481984] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804dd794-5eb9-4888-bc29-021cc53f40fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.503212] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a91b1e6-f756-448b-8a12-a0b538d80c43 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.505449] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 874.505449] env[62383]: value = "task-2451805" [ 874.505449] env[62383]: _type = "Task" [ 874.505449] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.509548] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520e8df7-ab04-8e1d-3e6a-eaa16542174f, 'name': SearchDatastore_Task, 'duration_secs': 0.009261} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.513029] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.513599] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 874.513850] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.514061] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.514191] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 874.535989] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d016bae8-b272-4088-ab70-9b2f35b887b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.544402] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] volume-34440909-fbf8-4f00-9d5a-dd07de7bcefa/volume-34440909-fbf8-4f00-9d5a-dd07de7bcefa.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.548043] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70f69d64-0fa6-420f-a0e2-ecc952dc74b8 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.569414] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451805, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.573726] env[62383]: DEBUG oslo_vmware.api [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 874.573726] env[62383]: value = "task-2451806" [ 874.573726] env[62383]: _type = "Task" [ 874.573726] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.574956] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 874.575148] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 874.580039] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db48247f-223b-4e4c-8ee0-dfba122388a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.587118] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for the task: (returnval){ [ 874.587118] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5251f9c1-1826-fedf-6ffb-084c841cdb55" [ 874.587118] env[62383]: _type = "Task" [ 874.587118] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.588598] env[62383]: DEBUG oslo_vmware.api [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451806, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.598061] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5251f9c1-1826-fedf-6ffb-084c841cdb55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.834961] env[62383]: DEBUG nova.network.neutron [req-a6c0c3a7-cfc8-472c-81cb-b5524e028e2c req-55bbaf70-2609-42a0-a0b7-c134cad103c9 service nova] [instance: 40719661-5955-48ec-b289-b37896dd04df] Updated VIF entry in instance network info cache for port 93551a4b-143c-4a41-b504-5294fbc26f56. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 874.835283] env[62383]: DEBUG nova.network.neutron [req-a6c0c3a7-cfc8-472c-81cb-b5524e028e2c req-55bbaf70-2609-42a0-a0b7-c134cad103c9 service nova] [instance: 40719661-5955-48ec-b289-b37896dd04df] Updating instance_info_cache with network_info: [{"id": "93551a4b-143c-4a41-b504-5294fbc26f56", "address": "fa:16:3e:e5:66:a9", "network": {"id": "32421969-dc86-47ae-9d44-b96488dd9fdb", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1541208270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11325c08987e4a3ba4b8a9979a2204db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f77ff7a1-209c-4f3f-b2a0-fd817741e739", "external-id": "nsx-vlan-transportzone-935", "segmentation_id": 935, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93551a4b-14", "ovs_interfaceid": "93551a4b-143c-4a41-b504-5294fbc26f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.985651] env[62383]: DEBUG oslo_concurrency.lockutils [None req-03e50aa3-74a1-4733-8b9d-d41d9b243c86 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "2eba2920-7912-475b-a198-890743aa5255" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.159s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.021935] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451805, 'name': Rename_Task, 'duration_secs': 0.187625} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.022049] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.022252] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dccff55c-bf7c-479a-8ade-ae78da78e806 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.029013] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 875.029013] env[62383]: value = "task-2451807" [ 875.029013] env[62383]: _type = "Task" [ 875.029013] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.036752] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451807, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.087215] env[62383]: DEBUG oslo_vmware.api [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451806, 'name': ReconfigVM_Task, 'duration_secs': 0.464157} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.090040] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Reconfigured VM instance instance-00000033 to attach disk [datastore2] volume-34440909-fbf8-4f00-9d5a-dd07de7bcefa/volume-34440909-fbf8-4f00-9d5a-dd07de7bcefa.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 875.095762] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6853c64-707e-47b1-8a4c-11f36df65405 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.117775] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5251f9c1-1826-fedf-6ffb-084c841cdb55, 'name': SearchDatastore_Task, 'duration_secs': 0.012628} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.119675] env[62383]: DEBUG oslo_vmware.api [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 875.119675] env[62383]: value = "task-2451808" [ 875.119675] env[62383]: _type = "Task" [ 875.119675] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.119896] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a9bba58-e12e-434c-8331-e85b9f34a51e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.133272] env[62383]: DEBUG oslo_vmware.api [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451808, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.134303] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for the task: (returnval){ [ 875.134303] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5297fc22-067e-c4f1-8e27-1d1108c3b483" [ 875.134303] env[62383]: _type = "Task" [ 875.134303] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.144576] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5297fc22-067e-c4f1-8e27-1d1108c3b483, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.342028] env[62383]: DEBUG oslo_concurrency.lockutils [req-a6c0c3a7-cfc8-472c-81cb-b5524e028e2c req-55bbaf70-2609-42a0-a0b7-c134cad103c9 service nova] Releasing lock "refresh_cache-40719661-5955-48ec-b289-b37896dd04df" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.454922] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9cb092-dc15-4f2c-b2f2-818a8b88abeb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.466385] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933a94aa-7e38-428d-9e84-a2e064234ec8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.500764] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d95b88b-56d1-4240-a940-7e6332c91512 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.508424] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b706176-964b-462a-873c-72e652f69fec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.522299] env[62383]: DEBUG nova.compute.provider_tree [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.537990] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451807, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.633609] env[62383]: DEBUG oslo_vmware.api [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451808, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.643086] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5297fc22-067e-c4f1-8e27-1d1108c3b483, 'name': SearchDatastore_Task, 'duration_secs': 0.021834} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.643344] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.643600] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 40719661-5955-48ec-b289-b37896dd04df/40719661-5955-48ec-b289-b37896dd04df.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 875.643854] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fd6bcd6-69a1-4c19-953e-c06aeab44fda {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.651148] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for the task: (returnval){ [ 875.651148] env[62383]: value = "task-2451809" [ 875.651148] env[62383]: _type = "Task" [ 875.651148] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.659640] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451809, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.026117] env[62383]: DEBUG nova.scheduler.client.report [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 876.041111] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451807, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.139237] env[62383]: DEBUG oslo_vmware.api [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451808, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.163040] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451809, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.380794] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "0f6b7094-27a0-4e97-98ac-bff857124b6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.381045] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "0f6b7094-27a0-4e97-98ac-bff857124b6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.536259] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.084s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.540559] env[62383]: DEBUG oslo_concurrency.lockutils [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.821s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.542232] env[62383]: DEBUG nova.objects.instance [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lazy-loading 'resources' on Instance uuid 1a740010-ddd0-4df6-8ae6-02f1ed50137f {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 876.552458] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451807, 'name': PowerOnVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.567266] env[62383]: INFO nova.scheduler.client.report [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Deleted allocations for instance a8d56b8e-fa11-4844-ab65-a2e5d24b1e07 [ 876.635827] env[62383]: DEBUG oslo_vmware.api [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451808, 'name': ReconfigVM_Task, 'duration_secs': 1.037531} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.635827] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496502', 'volume_id': '34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'name': 'volume-34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1e367665-1d4b-4686-ac79-c946423c1762', 'attached_at': '', 'detached_at': '', 'volume_id': '34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'serial': '34440909-fbf8-4f00-9d5a-dd07de7bcefa'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 876.665021] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451809, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535297} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.665021] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 40719661-5955-48ec-b289-b37896dd04df/40719661-5955-48ec-b289-b37896dd04df.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 876.665021] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 876.665021] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-905102e1-ddf4-45eb-97d3-8b30817549b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.671481] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for the task: (returnval){ [ 876.671481] env[62383]: value = "task-2451810" [ 876.671481] env[62383]: _type = "Task" [ 876.671481] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.683083] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451810, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.890250] env[62383]: DEBUG nova.compute.manager [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 877.041967] env[62383]: DEBUG oslo_vmware.api [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451807, 'name': PowerOnVM_Task, 'duration_secs': 1.684112} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.042263] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.042476] env[62383]: INFO nova.compute.manager [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Took 11.10 seconds to spawn the instance on the hypervisor. [ 877.042654] env[62383]: DEBUG nova.compute.manager [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 877.043895] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58b06f3-21b8-42d0-9ceb-3b9e61217fbe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.079043] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e7166bd2-4eae-444f-ad50-553a580cc665 tempest-MigrationsAdminTest-41819132 tempest-MigrationsAdminTest-41819132-project-member] Lock "a8d56b8e-fa11-4844-ab65-a2e5d24b1e07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.528s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.185311] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076554} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.192105] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 877.197218] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8be052-e318-43d6-9b3e-2dda9aeeeecf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.219720] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 40719661-5955-48ec-b289-b37896dd04df/40719661-5955-48ec-b289-b37896dd04df.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 877.223121] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd4ce5f2-b275-4e4a-972c-e4fcd81e6842 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.246596] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for the task: (returnval){ [ 877.246596] env[62383]: value = "task-2451811" [ 877.246596] env[62383]: _type = "Task" [ 877.246596] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.257276] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451811, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.424356] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.529462] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1705e0f9-bca8-400b-8158-a528343c2b2a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.536863] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3adaf9d3-b9bd-4d4c-b6f5-5e06221f79b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.578419] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab485750-e869-4adb-94a7-613d9e1e3c5f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.583121] env[62383]: INFO nova.compute.manager [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Took 26.79 seconds to build instance. [ 877.589560] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff2795b-1a06-4b44-b402-9c038304641d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.610306] env[62383]: DEBUG nova.compute.provider_tree [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.697625] env[62383]: DEBUG nova.objects.instance [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lazy-loading 'flavor' on Instance uuid 1e367665-1d4b-4686-ac79-c946423c1762 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 877.759312] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451811, 'name': ReconfigVM_Task, 'duration_secs': 0.308037} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.763124] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 40719661-5955-48ec-b289-b37896dd04df/40719661-5955-48ec-b289-b37896dd04df.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 877.763124] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e463bccd-a0c6-4b22-aed2-8f73727c4aed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.767871] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for the task: (returnval){ [ 877.767871] env[62383]: value = "task-2451812" [ 877.767871] env[62383]: _type = "Task" [ 877.767871] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.779395] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451812, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.988500] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquiring lock "188d6b20-3dca-4c1c-8271-1871d2c992d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.988814] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lock "188d6b20-3dca-4c1c-8271-1871d2c992d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.085682] env[62383]: INFO nova.compute.manager [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Rescuing [ 878.085682] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.085877] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock 
"refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.086139] env[62383]: DEBUG nova.network.neutron [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 878.089708] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1d53ea2e-c1af-4413-98fc-f5720523c8b0 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.310s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.113677] env[62383]: DEBUG nova.scheduler.client.report [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 878.206910] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c431e362-b959-45d3-b042-38f086b90bf9 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.909s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.226277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquiring lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.226277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.226277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquiring lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.226277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.227510] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.227668] env[62383]: INFO nova.compute.manager [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Terminating instance [ 878.278957] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451812, 'name': Rename_Task, 'duration_secs': 0.155399} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.278957] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 878.278957] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c07edbf-dc7f-4c6a-b6bd-af5da2337a04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.286544] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for the task: (returnval){ [ 878.286544] env[62383]: value = "task-2451813" [ 878.286544] env[62383]: _type = "Task" [ 878.286544] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.303382] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451813, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.492057] env[62383]: DEBUG nova.compute.manager [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 878.619876] env[62383]: DEBUG oslo_concurrency.lockutils [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.081s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.624925] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.318s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.624925] env[62383]: INFO nova.compute.claims [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.652030] env[62383]: INFO nova.scheduler.client.report [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Deleted allocations for instance 1a740010-ddd0-4df6-8ae6-02f1ed50137f [ 878.735785] env[62383]: DEBUG nova.compute.manager [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 878.736119] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.737131] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669085b2-1b1a-40fa-b702-bedf8be77339 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.745970] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.746213] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8de6e19a-6eca-4dd8-aba5-482539e82aac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.752408] env[62383]: DEBUG oslo_vmware.api [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for the task: (returnval){ [ 878.752408] env[62383]: value = "task-2451814" [ 878.752408] env[62383]: _type = "Task" [ 878.752408] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.760642] env[62383]: DEBUG oslo_vmware.api [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451814, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.797253] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451813, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.021263] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.146637] env[62383]: DEBUG nova.network.neutron [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updating instance_info_cache with network_info: [{"id": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "address": "fa:16:3e:24:0a:9f", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09c6085-47", "ovs_interfaceid": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.163736] env[62383]: DEBUG oslo_concurrency.lockutils [None req-57291a34-aecb-4107-9630-8e9922e80c7e tempest-ServersAdminTestJSON-1927862429 tempest-ServersAdminTestJSON-1927862429-project-member] Lock "1a740010-ddd0-4df6-8ae6-02f1ed50137f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.024s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.262023] env[62383]: DEBUG oslo_vmware.api [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 
tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451814, 'name': PowerOffVM_Task, 'duration_secs': 0.402692} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.262487] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.263917] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.263917] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a605add-7abf-4afe-b4bb-ab0efaa36e4e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.298741] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451813, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.650718] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.680100] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "c56464dd-63af-4686-b666-d0ac2df01ec1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.680342] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.804017] env[62383]: DEBUG oslo_vmware.api [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451813, 'name': PowerOnVM_Task, 'duration_secs': 1.221702} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.804470] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.804706] env[62383]: INFO nova.compute.manager [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Took 9.36 seconds to spawn the instance on the hypervisor. [ 879.804936] env[62383]: DEBUG nova.compute.manager [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 879.805787] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d06de5-45b4-4c78-99f0-dfb2acba1134 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.025075] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518f571f-fe50-46b1-9a08-dfaa6749f9c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.032687] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cc9f0c-3b2c-4759-8d44-0d65357e8fb8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.062174] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31900a91-d9d3-491f-afcc-0b0600c82c18 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.073021] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b034b8-df56-4235-bb18-df024a9b3a3c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.085727] env[62383]: DEBUG nova.compute.provider_tree [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.194463] env[62383]: DEBUG nova.compute.manager [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 880.205058] env[62383]: DEBUG nova.compute.manager [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Stashing vm_state: active {{(pid=62383) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 880.299219] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 880.299219] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 880.299219] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Deleting the datastore file [datastore1] 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 880.299219] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-793627be-2ae4-4fd0-8fcb-9d74f9000c15 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.307280] env[62383]: DEBUG oslo_vmware.api [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for the task: (returnval){ [ 880.307280] env[62383]: value = "task-2451816" [ 880.307280] env[62383]: _type = "Task" [ 880.307280] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.315374] env[62383]: DEBUG oslo_vmware.api [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.330656] env[62383]: INFO nova.compute.manager [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Took 24.71 seconds to build instance. 
[ 880.589977] env[62383]: DEBUG nova.scheduler.client.report [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.727536] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.730778] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.817424] env[62383]: DEBUG oslo_vmware.api [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Task: {'id': task-2451816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160606} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.817424] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.817424] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.817701] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.817942] env[62383]: INFO nova.compute.manager [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Took 2.08 seconds to destroy the instance on the hypervisor. 
[ 880.818255] env[62383]: DEBUG oslo.service.loopingcall [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.818552] env[62383]: DEBUG nova.compute.manager [-] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.819018] env[62383]: DEBUG nova.network.neutron [-] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.832849] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5d2346db-b5f9-418f-8325-27f01b35780c tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lock "40719661-5955-48ec-b289-b37896dd04df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.216s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.098463] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.099574] env[62383]: DEBUG nova.compute.manager [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 881.103559] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.184s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.108029] env[62383]: INFO nova.compute.claims [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.214099] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.214099] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcc1d761-aacc-4909-ac53-4f968ce79ef7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.225074] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 881.225074] env[62383]: value = "task-2451817" [ 881.225074] env[62383]: _type = "Task" [ 881.225074] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.233393] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451817, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.614047] env[62383]: DEBUG nova.compute.utils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.618633] env[62383]: DEBUG nova.compute.manager [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 881.618923] env[62383]: DEBUG nova.network.neutron [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 881.651918] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.652049] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.652343] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.652424] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.652620] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.656729] env[62383]: INFO nova.compute.manager [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Terminating instance [ 881.715637] env[62383]: DEBUG nova.policy [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cce49ef14f3a474c9448607425da3dc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 
'project_id': '2439f3d802f34027b12d50f242a54ba3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 881.740430] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451817, 'name': PowerOffVM_Task, 'duration_secs': 0.248147} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.740627] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.743190] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd77928-6dd5-42c3-bfcf-e98b85feefd5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.770694] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c3b995-25d8-49c5-853a-3835a8637fb4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.825707] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 881.825814] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e81e496-00db-48b0-811f-3ae5a7cf98a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.834865] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 881.834865] env[62383]: value = "task-2451818" [ 881.834865] env[62383]: _type = "Task" [ 881.834865] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.846758] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 881.847962] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 881.848850] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.848850] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.848850] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 881.849142] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18c001d5-0d6a-481f-ad80-7568df89ee6c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.858904] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 881.859182] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 881.859970] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55fdbad1-7286-4c4f-a5d4-fcc9e31938b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.867078] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 881.867078] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52535f8e-5014-2158-6a2e-e059a8cf9b62" [ 881.867078] env[62383]: _type = "Task" [ 881.867078] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.875537] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52535f8e-5014-2158-6a2e-e059a8cf9b62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.883688] env[62383]: DEBUG nova.compute.manager [req-4820c003-319a-43d7-a17e-063f9518a363 req-3a2d1ad8-d3a9-450e-8558-176eeec62c4e service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Received event network-vif-deleted-daaf9854-b852-4045-8380-ae136341958b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 881.883688] env[62383]: INFO nova.compute.manager [req-4820c003-319a-43d7-a17e-063f9518a363 req-3a2d1ad8-d3a9-450e-8558-176eeec62c4e service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Neutron deleted interface daaf9854-b852-4045-8380-ae136341958b; detaching it from the instance and deleting it from the info cache [ 881.883688] env[62383]: DEBUG nova.network.neutron [req-4820c003-319a-43d7-a17e-063f9518a363 req-3a2d1ad8-d3a9-450e-8558-176eeec62c4e service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.119773] env[62383]: DEBUG nova.compute.manager [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 882.172601] env[62383]: DEBUG nova.compute.manager [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 882.172601] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 882.174221] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa3d2df-d3a1-4fc2-bb10-085c4b93f38e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.185315] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 882.187956] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d6ad213-4f68-44e7-bacd-27c0f969dbb4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.195675] env[62383]: DEBUG oslo_vmware.api [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 882.195675] env[62383]: value = "task-2451819" [ 882.195675] env[62383]: _type = "Task" [ 882.195675] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.203786] env[62383]: DEBUG oslo_vmware.api [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451819, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.286257] env[62383]: DEBUG nova.network.neutron [-] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.311621] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "2f028680-8db4-474a-8f24-880c4702877b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.315175] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.315398] env[62383]: DEBUG nova.compute.manager [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 882.316480] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f373f7f-da5f-4efd-b7f6-98d54cfbce59 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.325818] env[62383]: DEBUG nova.compute.manager [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 882.326685] env[62383]: DEBUG nova.objects.instance [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'flavor' on Instance uuid 2f028680-8db4-474a-8f24-880c4702877b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 882.384450] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52535f8e-5014-2158-6a2e-e059a8cf9b62, 'name': SearchDatastore_Task, 'duration_secs': 0.009316} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.384450] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74a79c1d-4f17-4ebd-98a4-5e4078ec8a40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.385942] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-06024fa8-e771-4e72-b429-7ee9ac40ba82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.393991] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 882.393991] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520f26cc-2076-925f-bef7-2cf3756bf0fb" [ 882.393991] env[62383]: _type = "Task" [ 882.393991] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.408662] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bda6ac-8414-4d44-95b6-839f7f1e7a16 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.425793] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520f26cc-2076-925f-bef7-2cf3756bf0fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.451733] env[62383]: DEBUG nova.compute.manager [req-4820c003-319a-43d7-a17e-063f9518a363 req-3a2d1ad8-d3a9-450e-8558-176eeec62c4e service nova] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Detach interface failed, port_id=daaf9854-b852-4045-8380-ae136341958b, reason: Instance 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 882.507021] env[62383]: DEBUG nova.network.neutron [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Successfully created port: 6ba23e44-2c77-442d-9aee-5a75d8abab68 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.617846] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22f04b1-1760-49a2-bb49-832f380f1f2d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.625793] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9b9cac-374c-4acd-b58a-0ca01213bb6b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.659548] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6def72-97af-4882-b0b3-ab367b84033f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.662348] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquiring lock "40719661-5955-48ec-b289-b37896dd04df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.662566] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lock "40719661-5955-48ec-b289-b37896dd04df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.662761] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquiring lock "40719661-5955-48ec-b289-b37896dd04df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.662930] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lock "40719661-5955-48ec-b289-b37896dd04df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.663121] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lock "40719661-5955-48ec-b289-b37896dd04df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.665307] env[62383]: INFO nova.compute.manager [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Terminating instance [ 882.671010] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e55e55-38e8-431b-9102-fd2c28ea87d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.687642] env[62383]: DEBUG nova.compute.provider_tree [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.705958] env[62383]: DEBUG oslo_vmware.api [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451819, 'name': PowerOffVM_Task, 'duration_secs': 0.271171} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.706234] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 882.707642] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 882.707642] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1eac1401-1486-42d9-ade5-7931e9d5b499 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.768009] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 882.768347] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 882.768420] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Deleting the datastore file [datastore2] e41f5c22-44e0-4de8-a4d0-865fe2c6febd {{(pid=62383) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 882.768679] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc66010c-c679-49e9-9520-3905fc9498e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.776553] env[62383]: DEBUG oslo_vmware.api [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for the task: (returnval){ [ 882.776553] env[62383]: value = "task-2451821" [ 882.776553] env[62383]: _type = "Task" [ 882.776553] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.788208] env[62383]: DEBUG oslo_vmware.api [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451821, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.788639] env[62383]: INFO nova.compute.manager [-] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Took 1.97 seconds to deallocate network for instance. [ 882.835668] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.836114] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.905479] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520f26cc-2076-925f-bef7-2cf3756bf0fb, 'name': SearchDatastore_Task, 'duration_secs': 0.017581} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.906072] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.906072] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 1e367665-1d4b-4686-ac79-c946423c1762/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. {{(pid=62383) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 882.906301] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04733fac-61dd-4f80-9a97-f6cfe0c5b109 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.912575] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 882.912575] env[62383]: value = "task-2451822" [ 882.912575] env[62383]: _type = "Task" [ 882.912575] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.921870] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451822, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.134964] env[62383]: DEBUG nova.compute.manager [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 883.161580] env[62383]: DEBUG nova.virt.hardware [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 883.161854] env[62383]: DEBUG nova.virt.hardware [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.161977] env[62383]: DEBUG nova.virt.hardware [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 883.162187] env[62383]: DEBUG nova.virt.hardware [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.162347] env[62383]: DEBUG nova.virt.hardware [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 883.162478] env[62383]: DEBUG nova.virt.hardware [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 883.162692] env[62383]: DEBUG nova.virt.hardware [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 883.162982] env[62383]: DEBUG nova.virt.hardware [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 883.163060] env[62383]: DEBUG nova.virt.hardware [None 
req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 883.163187] env[62383]: DEBUG nova.virt.hardware [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 883.163357] env[62383]: DEBUG nova.virt.hardware [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 883.164266] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12860f02-af0a-4c58-bd8e-81731d12f9bc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.172663] env[62383]: DEBUG nova.compute.manager [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 883.172893] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.173855] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e5d42f-a8a7-4ab6-a6bc-6718e2574dc5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.177815] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db607000-9bc9-4471-a9ee-c30a80f68c4e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.194654] env[62383]: DEBUG nova.scheduler.client.report [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 883.201458] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Powering off the VM {{(pid=62383) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.205042] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b323bc2-569d-4b87-b7cf-a63fef31e00a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.209488] env[62383]: DEBUG oslo_vmware.api [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for the task: (returnval){ [ 883.209488] env[62383]: value = "task-2451823" [ 883.209488] env[62383]: _type = "Task" [ 883.209488] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.217904] env[62383]: DEBUG oslo_vmware.api [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451823, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.291268] env[62383]: DEBUG oslo_vmware.api [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Task: {'id': task-2451821, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219863} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.291268] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 883.291268] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 883.291268] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 883.291268] env[62383]: INFO nova.compute.manager [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Took 1.12 seconds to destroy the instance on the hypervisor. [ 883.291603] env[62383]: DEBUG oslo.service.loopingcall [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.291603] env[62383]: DEBUG nova.compute.manager [-] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 883.291603] env[62383]: DEBUG nova.network.neutron [-] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 883.296489] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.341385] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.341385] env[62383]: DEBUG nova.compute.utils [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 883.341946] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-679bb1d5-5955-4ac1-81cb-065902fe121e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.350766] env[62383]: DEBUG oslo_vmware.api [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 883.350766] env[62383]: value = "task-2451824" [ 883.350766] env[62383]: _type = "Task" [ 883.350766] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.364813] env[62383]: DEBUG oslo_vmware.api [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451824, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.428281] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451822, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.704842] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.601s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.705591] env[62383]: DEBUG nova.compute.manager [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 883.714247] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.497s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.714247] env[62383]: DEBUG nova.objects.instance [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lazy-loading 'resources' on Instance uuid 4b3e6064-4462-45e7-b5dd-f2fc22422c3e {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.726769] env[62383]: DEBUG oslo_vmware.api [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451823, 'name': PowerOffVM_Task, 'duration_secs': 0.231047} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.726982] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 883.727211] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 883.727465] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc2fee78-1885-4942-b5b1-2fd4ec8f0382 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.790025] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 883.790275] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 883.790820] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Deleting the datastore file [datastore2] 40719661-5955-48ec-b289-b37896dd04df {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 883.791130] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1be59917-eef7-461a-8aaf-b06905012015 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.799176] env[62383]: DEBUG oslo_vmware.api [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for the task: (returnval){ [ 883.799176] env[62383]: value = "task-2451826" [ 883.799176] env[62383]: _type = "Task" [ 883.799176] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.808754] env[62383]: DEBUG oslo_vmware.api [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451826, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.846763] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.867430] env[62383]: DEBUG oslo_vmware.api [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451824, 'name': PowerOffVM_Task, 'duration_secs': 0.354315} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.867617] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 883.867858] env[62383]: DEBUG nova.compute.manager [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 883.869361] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbd397c-f1d0-4090-b487-14142ca94019 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.930563] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451822, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51643} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.932148] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 1e367665-1d4b-4686-ac79-c946423c1762/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. 
[ 883.932897] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852fa278-5ce7-47bc-a1a5-ce24ffa9d288 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.965020] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 1e367665-1d4b-4686-ac79-c946423c1762/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 883.965723] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6eef7424-c2ab-4f94-83e6-49414b3dfd94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.987862] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 883.987862] env[62383]: value = "task-2451827" [ 883.987862] env[62383]: _type = "Task" [ 883.987862] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.996742] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451827, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.216293] env[62383]: DEBUG nova.compute.utils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 884.218080] env[62383]: DEBUG nova.compute.manager [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 884.218080] env[62383]: DEBUG nova.network.neutron [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 884.274331] env[62383]: DEBUG nova.policy [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6324b3a4f5a24752b0bef1b5d79ea2ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fabc88f824a44c57b19a07a605fb89fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 884.312509] env[62383]: DEBUG oslo_vmware.api [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Task: {'id': task-2451826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.382006} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.312801] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.313092] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 884.313251] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.314330] env[62383]: INFO nova.compute.manager [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] [instance: 40719661-5955-48ec-b289-b37896dd04df] Took 1.14 seconds to destroy the instance on the hypervisor. [ 884.314330] env[62383]: DEBUG oslo.service.loopingcall [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.314330] env[62383]: DEBUG nova.compute.manager [-] [instance: 40719661-5955-48ec-b289-b37896dd04df] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 884.314330] env[62383]: DEBUG nova.network.neutron [-] [instance: 40719661-5955-48ec-b289-b37896dd04df] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.352254] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "c117e858-696f-43dc-9182-70380214737f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.352996] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "c117e858-696f-43dc-9182-70380214737f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.370361] env[62383]: DEBUG nova.network.neutron [-] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.387580] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2fa75d1-5dd9-46d6-832c-cedff917183c tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.074s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.506439] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451827, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.537828] env[62383]: DEBUG nova.network.neutron [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Successfully updated port: 6ba23e44-2c77-442d-9aee-5a75d8abab68 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 884.723708] env[62383]: DEBUG nova.compute.manager [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 884.786936] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d1518c-dfc9-48c1-a998-bd51d84cce94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.795142] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fb34c5-ea1e-44ef-9179-ab60f73ad21d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.839038] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45a8a07-bcf5-4453-bd48-9703028f7a29 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.849795] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb49547d-d673-4f5d-959f-aa20e0a503a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.855270] env[62383]: DEBUG nova.compute.manager [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 884.869545] env[62383]: DEBUG nova.compute.provider_tree [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.871787] env[62383]: DEBUG nova.network.neutron [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Successfully created port: b32e6644-0a0d-489f-810c-598958631523 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 884.874764] env[62383]: INFO nova.compute.manager [-] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Took 1.58 seconds to deallocate network for instance. 
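The DeleteDatastoreFile_Task and ReconfigVM_Task entries above follow oslo.vmware's poll-and-wait pattern: the compute driver starts an asynchronous vSphere task through the shared VMwareAPISession and then blocks in wait_for_task(), which is what emits the periodic "progress is N%" lines and the final "completed successfully" line. A minimal sketch of that pattern follows; the vCenter host, credentials and the managed-object lookup are placeholders for illustration, not values taken from this log.

# Illustrative sketch only: oslo.vmware task polling as seen in this log.
# Host, credentials and the VM lookup below are placeholders.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Pick some VirtualMachine managed-object reference (Nova resolves this
# from its instance UUID; here we simply take the first VM returned).
result = session.invoke_api(
    vim_util, 'get_objects', session.vim, 'VirtualMachine', 100)
vm_ref = result.objects[0].obj

# Start the asynchronous vSphere task, then block until it completes.
# wait_for_task() polls the task and logs its progress, producing the
# "progress is N%" / "completed successfully" DEBUG lines.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)

Routing every call through invoke_api() lets the session layer handle retries and re-login centrally, which is consistent with the single session (pid 62383) servicing all of the tasks in this log.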
[ 884.876180] env[62383]: DEBUG nova.compute.manager [req-b1d19c40-b15c-4dbc-a5be-99517ef14c9c req-a0827d86-afc6-4cc5-9148-f2ece52dc478 service nova] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Received event network-vif-deleted-66ff13c1-430d-414e-a83a-3d9e7536688a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 884.960509] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.960689] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.961411] env[62383]: INFO nova.compute.manager [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Attaching volume 46a8babb-a07c-4277-8a54-ff1a519becfb to /dev/sdb [ 885.001420] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451827, 'name': ReconfigVM_Task, 'duration_secs': 0.671851} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.001734] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 1e367665-1d4b-4686-ac79-c946423c1762/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.002979] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3deeb2d6-5dde-4e14-9f2a-3f2596968059 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.038278] env[62383]: DEBUG nova.compute.manager [req-062a4c0f-7924-422f-8e0e-a64dbe93494a req-10174f8f-8ebe-4965-8009-ba3caca95857 service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Received event network-vif-plugged-6ba23e44-2c77-442d-9aee-5a75d8abab68 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 885.039210] env[62383]: DEBUG oslo_concurrency.lockutils [req-062a4c0f-7924-422f-8e0e-a64dbe93494a req-10174f8f-8ebe-4965-8009-ba3caca95857 service nova] Acquiring lock "eedc7859-3882-4837-9419-f9edce5f12fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.039210] env[62383]: DEBUG oslo_concurrency.lockutils [req-062a4c0f-7924-422f-8e0e-a64dbe93494a req-10174f8f-8ebe-4965-8009-ba3caca95857 service nova] Lock "eedc7859-3882-4837-9419-f9edce5f12fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.039210] env[62383]: DEBUG oslo_concurrency.lockutils [req-062a4c0f-7924-422f-8e0e-a64dbe93494a req-10174f8f-8ebe-4965-8009-ba3caca95857 service nova] Lock "eedc7859-3882-4837-9419-f9edce5f12fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.039210] env[62383]: DEBUG nova.compute.manager [req-062a4c0f-7924-422f-8e0e-a64dbe93494a req-10174f8f-8ebe-4965-8009-ba3caca95857 service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] No waiting events found dispatching network-vif-plugged-6ba23e44-2c77-442d-9aee-5a75d8abab68 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 885.039397] env[62383]: WARNING nova.compute.manager [req-062a4c0f-7924-422f-8e0e-a64dbe93494a req-10174f8f-8ebe-4965-8009-ba3caca95857 service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Received unexpected event network-vif-plugged-6ba23e44-2c77-442d-9aee-5a75d8abab68 for instance with vm_state building and task_state spawning. 
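The "Acquiring lock ... by ...", "Lock ... acquired ... :: waited N s" and "Lock ... \"released\" ... :: held N s" triples, like the per-instance "...-events" lock in the entries above, are emitted by oslo.concurrency's lockutils helpers (the lock() context manager and the synchronized() decorator). A small self-contained sketch of both forms follows; the lock names and the guarded function are illustrative only, not Nova code.

# Illustrative sketch only: oslo.concurrency named locks. With debug
# logging enabled this produces the same "Acquiring lock / acquired /
# released" lines seen throughout this log.
import logging
from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)

# Context-manager form: serialize access to a shared resource by name.
with lockutils.lock('refresh_cache-example-instance-uuid'):
    pass  # e.g. rebuild an instance network info cache here

# Decorator form: every call to the function is guarded by the named
# lock, and the wrapper logs how long it waited for and held the lock.
@lockutils.synchronized('example-instance-uuid-events')
def pop_event():
    pass  # critical section

pop_event()

By default these locks are in-process (external=False); the per-instance names seen in the log scope contention to the single nova-compute worker that owns those instances.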
[ 885.039831] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f26e3d1f-d982-4673-b7f0-2c2842f78e82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.050756] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e24f2d9-37e7-4005-97d6-0ab1a6582154 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.053553] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.053694] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.053844] env[62383]: DEBUG nova.network.neutron [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 885.062066] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80bfaf45-a0a6-4d4e-ac5b-b42d556cb3a7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.065939] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 885.065939] env[62383]: value = "task-2451828" [ 885.065939] env[62383]: _type = "Task" [ 885.065939] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.084336] env[62383]: DEBUG nova.virt.block_device [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Updating existing volume attachment record: 387c6aef-1b93-41bc-8dbe-be1571e33add {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 885.084698] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451828, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.345048] env[62383]: DEBUG nova.network.neutron [-] [instance: 40719661-5955-48ec-b289-b37896dd04df] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.378483] env[62383]: DEBUG nova.scheduler.client.report [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.386659] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.394731] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.575370] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451828, 'name': ReconfigVM_Task, 'duration_secs': 0.181941} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.575645] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.575892] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0aa5add-e7cd-4cc0-854f-46f1a231adfe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.582964] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 885.582964] env[62383]: value = "task-2451832" [ 885.582964] env[62383]: _type = "Task" [ 885.582964] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.587751] env[62383]: DEBUG nova.network.neutron [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.593116] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451832, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.738029] env[62383]: DEBUG nova.compute.manager [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 885.741158] env[62383]: DEBUG nova.network.neutron [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance_info_cache with network_info: [{"id": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "address": "fa:16:3e:7f:55:d3", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba23e44-2c", "ovs_interfaceid": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.772688] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='01ecca79b6062ec41e55ff551a9a8e64',container_format='bare',created_at=2025-02-11T15:29:35Z,direct_url=,disk_format='vmdk',id=2d173d61-3079-43d0-8982-7bc84898b336,min_disk=1,min_ram=0,name='tempest-test-snap-1632981387',owner='fabc88f824a44c57b19a07a605fb89fd',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-02-11T15:29:50Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 885.772931] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 885.773100] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 885.773294] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 885.773443] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 885.773590] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 885.773799] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 885.773960] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 885.774187] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 885.774357] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 885.774531] env[62383]: DEBUG nova.virt.hardware [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Sorted desired 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 885.775390] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dee64e8-7815-4ca0-8797-b41caa04392f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.784472] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06aad906-2fd0-4372-9e16-42d6addd9263 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.838496] env[62383]: DEBUG nova.objects.instance [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'flavor' on Instance uuid 2f028680-8db4-474a-8f24-880c4702877b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 885.848130] env[62383]: INFO nova.compute.manager [-] [instance: 40719661-5955-48ec-b289-b37896dd04df] Took 1.53 seconds to deallocate network for instance. [ 885.885353] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.174s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.888961] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.756s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.890900] env[62383]: INFO nova.compute.claims [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.918372] env[62383]: INFO nova.scheduler.client.report [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted allocations for instance 4b3e6064-4462-45e7-b5dd-f2fc22422c3e [ 886.097294] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451832, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.243967] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.244353] env[62383]: DEBUG nova.compute.manager [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Instance network_info: |[{"id": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "address": "fa:16:3e:7f:55:d3", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba23e44-2c", "ovs_interfaceid": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 886.244789] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:55:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ba23e44-2c77-442d-9aee-5a75d8abab68', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 886.252663] env[62383]: DEBUG oslo.service.loopingcall [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.252906] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 886.253147] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a2ba718-759d-43e6-85b5-12bb60e921d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.274304] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 886.274304] env[62383]: value = "task-2451833" [ 886.274304] env[62383]: _type = "Task" [ 886.274304] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.284656] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451833, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.343909] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.344113] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.344353] env[62383]: DEBUG nova.network.neutron [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.344537] env[62383]: DEBUG nova.objects.instance [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'info_cache' on Instance uuid 2f028680-8db4-474a-8f24-880c4702877b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.356241] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.392972] env[62383]: DEBUG nova.network.neutron [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Successfully updated port: b32e6644-0a0d-489f-810c-598958631523 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 886.427616] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5284e2ff-d8fb-4f3d-8900-2a8cc36a551d tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock 
"4b3e6064-4462-45e7-b5dd-f2fc22422c3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.730s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.594413] env[62383]: DEBUG oslo_vmware.api [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451832, 'name': PowerOnVM_Task, 'duration_secs': 0.578727} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.594837] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.601066] env[62383]: DEBUG nova.compute.manager [None req-5cddd14a-c848-4d78-8c52-34bacf4ae176 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.602029] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99518d1d-f6a7-42f2-b11c-65b74365dd78 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.786233] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451833, 'name': CreateVM_Task, 'duration_secs': 0.353664} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.786423] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 886.787141] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.787316] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.787643] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 886.788229] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cbf7802-f999-4fab-9d7b-76ded608ca3a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.793370] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 886.793370] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5270b87b-e2ab-769c-dbe8-c435575d7b68" [ 886.793370] env[62383]: _type = "Task" [ 886.793370] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.801894] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5270b87b-e2ab-769c-dbe8-c435575d7b68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.849575] env[62383]: DEBUG nova.objects.base [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Object Instance<2f028680-8db4-474a-8f24-880c4702877b> lazy-loaded attributes: flavor,info_cache {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 886.896101] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "refresh_cache-506afe7c-f19b-4417-b097-485c0244a019" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.896494] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "refresh_cache-506afe7c-f19b-4417-b097-485c0244a019" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.896567] env[62383]: DEBUG nova.network.neutron [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.902399] env[62383]: DEBUG nova.compute.manager [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] [instance: 40719661-5955-48ec-b289-b37896dd04df] Received event network-vif-deleted-93551a4b-143c-4a41-b504-5294fbc26f56 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 886.902773] env[62383]: DEBUG nova.compute.manager [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Received event network-vif-plugged-b32e6644-0a0d-489f-810c-598958631523 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 886.902844] env[62383]: DEBUG oslo_concurrency.lockutils [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] Acquiring lock "506afe7c-f19b-4417-b097-485c0244a019-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.902985] env[62383]: DEBUG oslo_concurrency.lockutils [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] Lock "506afe7c-f19b-4417-b097-485c0244a019-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.903164] env[62383]: DEBUG oslo_concurrency.lockutils [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] Lock "506afe7c-f19b-4417-b097-485c0244a019-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.903338] env[62383]: DEBUG nova.compute.manager [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd 
req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] No waiting events found dispatching network-vif-plugged-b32e6644-0a0d-489f-810c-598958631523 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 886.903500] env[62383]: WARNING nova.compute.manager [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Received unexpected event network-vif-plugged-b32e6644-0a0d-489f-810c-598958631523 for instance with vm_state building and task_state spawning. [ 886.903660] env[62383]: DEBUG nova.compute.manager [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Received event network-changed-b32e6644-0a0d-489f-810c-598958631523 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 886.903818] env[62383]: DEBUG nova.compute.manager [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Refreshing instance network info cache due to event network-changed-b32e6644-0a0d-489f-810c-598958631523. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 886.904212] env[62383]: DEBUG oslo_concurrency.lockutils [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] Acquiring lock "refresh_cache-506afe7c-f19b-4417-b097-485c0244a019" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.275157] env[62383]: DEBUG nova.compute.manager [req-b5357a8a-cb07-4be6-b0d6-dade23ddd275 req-c55d8be3-dad0-4559-a454-c309e6b0600a service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Received event network-changed-6ba23e44-2c77-442d-9aee-5a75d8abab68 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 887.275462] env[62383]: DEBUG nova.compute.manager [req-b5357a8a-cb07-4be6-b0d6-dade23ddd275 req-c55d8be3-dad0-4559-a454-c309e6b0600a service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Refreshing instance network info cache due to event network-changed-6ba23e44-2c77-442d-9aee-5a75d8abab68. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 887.275570] env[62383]: DEBUG oslo_concurrency.lockutils [req-b5357a8a-cb07-4be6-b0d6-dade23ddd275 req-c55d8be3-dad0-4559-a454-c309e6b0600a service nova] Acquiring lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.276729] env[62383]: DEBUG oslo_concurrency.lockutils [req-b5357a8a-cb07-4be6-b0d6-dade23ddd275 req-c55d8be3-dad0-4559-a454-c309e6b0600a service nova] Acquired lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.276729] env[62383]: DEBUG nova.network.neutron [req-b5357a8a-cb07-4be6-b0d6-dade23ddd275 req-c55d8be3-dad0-4559-a454-c309e6b0600a service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Refreshing network info cache for port 6ba23e44-2c77-442d-9aee-5a75d8abab68 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 887.308545] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5270b87b-e2ab-769c-dbe8-c435575d7b68, 'name': SearchDatastore_Task, 'duration_secs': 0.014543} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.308545] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.308947] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 887.309057] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.309343] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.309665] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 887.310217] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f59e15d4-8c16-4bd3-ad09-3d77d57b40fb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.324883] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 887.324883] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 887.326052] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53ef8882-5d59-4571-98e0-ecdea34f7b47 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.332932] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 887.332932] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c0f34e-7221-ff67-aab9-20ae99979667" [ 887.332932] env[62383]: _type = "Task" [ 887.332932] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.341610] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c0f34e-7221-ff67-aab9-20ae99979667, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.467503] env[62383]: DEBUG nova.network.neutron [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.705714] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4df84f-859b-4896-9193-746301cab7eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.715411] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54fb142-0087-4d64-a00d-98f05a6e0cbe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.753334] env[62383]: DEBUG nova.network.neutron [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [{"id": "5ba29557-a079-4404-9449-eeff24a0a3e4", "address": "fa:16:3e:fd:2c:1c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ba29557-a0", "ovs_interfaceid": "5ba29557-a079-4404-9449-eeff24a0a3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.758021] env[62383]: DEBUG nova.network.neutron [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Updating instance_info_cache with network_info: [{"id": "b32e6644-0a0d-489f-810c-598958631523", "address": "fa:16:3e:50:c3:33", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb32e6644-0a", "ovs_interfaceid": 
"b32e6644-0a0d-489f-810c-598958631523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.758021] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa796db-e8d1-43ee-bfc8-b6a47cf0264d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.767111] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a544e7fc-3221-45a7-b0c6-44cc83f42421 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.788408] env[62383]: DEBUG nova.compute.provider_tree [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.846336] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c0f34e-7221-ff67-aab9-20ae99979667, 'name': SearchDatastore_Task, 'duration_secs': 0.012863} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.847118] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8042425-8e1d-4952-b5c0-c9971230f9a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.853022] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 887.853022] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52de4a61-ed42-2562-9089-4cb2fd8175c6" [ 887.853022] env[62383]: _type = "Task" [ 887.853022] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.863541] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52de4a61-ed42-2562-9089-4cb2fd8175c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.010561] env[62383]: DEBUG nova.network.neutron [req-b5357a8a-cb07-4be6-b0d6-dade23ddd275 req-c55d8be3-dad0-4559-a454-c309e6b0600a service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updated VIF entry in instance network info cache for port 6ba23e44-2c77-442d-9aee-5a75d8abab68. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 888.010871] env[62383]: DEBUG nova.network.neutron [req-b5357a8a-cb07-4be6-b0d6-dade23ddd275 req-c55d8be3-dad0-4559-a454-c309e6b0600a service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance_info_cache with network_info: [{"id": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "address": "fa:16:3e:7f:55:d3", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba23e44-2c", "ovs_interfaceid": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.100769] env[62383]: DEBUG oslo_concurrency.lockutils [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "bc37e114-cf55-408b-9841-05eaf411b4f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.101056] env[62383]: DEBUG oslo_concurrency.lockutils [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "bc37e114-cf55-408b-9841-05eaf411b4f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.101764] env[62383]: DEBUG oslo_concurrency.lockutils [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "bc37e114-cf55-408b-9841-05eaf411b4f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.101972] env[62383]: DEBUG oslo_concurrency.lockutils [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "bc37e114-cf55-408b-9841-05eaf411b4f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.102193] env[62383]: DEBUG oslo_concurrency.lockutils [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 
tempest-ServersTestJSON-715409334-project-member] Lock "bc37e114-cf55-408b-9841-05eaf411b4f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.104256] env[62383]: INFO nova.compute.manager [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Terminating instance [ 888.261975] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.263164] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "refresh_cache-506afe7c-f19b-4417-b097-485c0244a019" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.263450] env[62383]: DEBUG nova.compute.manager [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Instance network_info: |[{"id": "b32e6644-0a0d-489f-810c-598958631523", "address": "fa:16:3e:50:c3:33", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb32e6644-0a", "ovs_interfaceid": "b32e6644-0a0d-489f-810c-598958631523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 888.264194] env[62383]: DEBUG oslo_concurrency.lockutils [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] Acquired lock "refresh_cache-506afe7c-f19b-4417-b097-485c0244a019" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.264378] env[62383]: DEBUG nova.network.neutron [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Refreshing network info cache for port b32e6644-0a0d-489f-810c-598958631523 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.265381] env[62383]: DEBUG 
nova.virt.vmwareapi.vmops [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:c3:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5f60c972-a72d-4c5f-a250-faadfd6eafbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b32e6644-0a0d-489f-810c-598958631523', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.273085] env[62383]: DEBUG oslo.service.loopingcall [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.274090] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.274654] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12a61775-0484-484c-9969-a355496c21f2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.292040] env[62383]: DEBUG nova.scheduler.client.report [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 888.297658] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.297658] env[62383]: value = "task-2451835" [ 888.297658] env[62383]: _type = "Task" [ 888.297658] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.306830] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451835, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.365559] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52de4a61-ed42-2562-9089-4cb2fd8175c6, 'name': SearchDatastore_Task, 'duration_secs': 0.012847} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.365871] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.366338] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] eedc7859-3882-4837-9419-f9edce5f12fa/eedc7859-3882-4837-9419-f9edce5f12fa.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 888.366729] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7cb9154-2ad4-4812-b495-d3266801a771 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.376021] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 888.376021] env[62383]: value = "task-2451836" [ 888.376021] env[62383]: _type = "Task" [ 888.376021] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.387089] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451836, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.514203] env[62383]: DEBUG oslo_concurrency.lockutils [req-b5357a8a-cb07-4be6-b0d6-dade23ddd275 req-c55d8be3-dad0-4559-a454-c309e6b0600a service nova] Releasing lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.610137] env[62383]: DEBUG nova.compute.manager [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 888.610137] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.610137] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147dfc38-c328-4547-973a-aae0f71ed077 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.620077] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.620077] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b656016-41b6-4587-8fab-187a43b312a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.627633] env[62383]: DEBUG oslo_vmware.api [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 888.627633] env[62383]: value = "task-2451837" [ 888.627633] env[62383]: _type = "Task" [ 888.627633] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.632408] env[62383]: INFO nova.compute.manager [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Unrescuing [ 888.632691] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.633025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquired lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.633736] env[62383]: DEBUG nova.network.neutron [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 888.641975] env[62383]: DEBUG oslo_vmware.api [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451837, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.803500] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.915s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.804145] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 888.806826] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.648s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.808369] env[62383]: INFO nova.compute.claims [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.818754] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451835, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.889838] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451836, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.013943] env[62383]: DEBUG nova.network.neutron [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Updated VIF entry in instance network info cache for port b32e6644-0a0d-489f-810c-598958631523. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 889.014327] env[62383]: DEBUG nova.network.neutron [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Updating instance_info_cache with network_info: [{"id": "b32e6644-0a0d-489f-810c-598958631523", "address": "fa:16:3e:50:c3:33", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb32e6644-0a", "ovs_interfaceid": "b32e6644-0a0d-489f-810c-598958631523", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.140998] env[62383]: DEBUG oslo_vmware.api [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451837, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.267355] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 889.267644] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e6099cf-dd4a-4c20-b15f-3a059651de02 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.277499] env[62383]: DEBUG oslo_vmware.api [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 889.277499] env[62383]: value = "task-2451838" [ 889.277499] env[62383]: _type = "Task" [ 889.277499] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.286276] env[62383]: DEBUG oslo_vmware.api [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451838, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.308913] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451835, 'name': CreateVM_Task, 'duration_secs': 0.544776} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.308913] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 889.309578] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.309739] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.310142] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 889.310406] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e1aefd0-3448-4067-8df5-831ee9244935 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.313220] env[62383]: DEBUG nova.compute.utils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 889.314608] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 889.314816] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 889.320860] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 889.320860] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52987b3b-1f40-a51c-1825-a4fc643b5fbb" [ 889.320860] env[62383]: _type = "Task" [ 889.320860] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.330596] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52987b3b-1f40-a51c-1825-a4fc643b5fbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.386163] env[62383]: DEBUG nova.policy [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eeedfa485774ec39dd7aba217199d6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd09227ae56ba4875954d0107ae5cf5f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 889.390673] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451836, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52091} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.391119] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] eedc7859-3882-4837-9419-f9edce5f12fa/eedc7859-3882-4837-9419-f9edce5f12fa.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 889.391519] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 889.391885] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05897223-111e-486e-aa1d-ae9e5f8d2ebc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.401019] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 889.401019] env[62383]: value = "task-2451839" [ 889.401019] env[62383]: _type = "Task" [ 889.401019] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.404459] env[62383]: DEBUG nova.network.neutron [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updating instance_info_cache with network_info: [{"id": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "address": "fa:16:3e:24:0a:9f", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09c6085-47", "ovs_interfaceid": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.412637] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451839, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.517266] env[62383]: DEBUG oslo_concurrency.lockutils [req-e9ac76de-40a6-40c7-86f3-47c7b81401dd req-03129024-68cd-434d-a7c2-75790b2b7612 service nova] Releasing lock "refresh_cache-506afe7c-f19b-4417-b097-485c0244a019" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.640304] env[62383]: DEBUG oslo_vmware.api [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451837, 'name': PowerOffVM_Task, 'duration_secs': 0.616311} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.640664] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 889.644021] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 889.644021] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82ec47a5-f168-454d-9dcd-2a8cf6395daf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.709701] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Successfully created port: 3ac42380-e58c-4b6c-81b1-92660033b445 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 889.723316] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 889.727881] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 889.728128] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleting the datastore file [datastore1] bc37e114-cf55-408b-9841-05eaf411b4f5 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 889.728437] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d13be79-1fbf-4c4a-8b53-13dc83f7776e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.736349] env[62383]: DEBUG oslo_vmware.api [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 889.736349] env[62383]: value = "task-2451841" [ 889.736349] env[62383]: _type = "Task" [ 889.736349] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.750020] env[62383]: DEBUG oslo_vmware.api [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451841, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.762166] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Volume attach. Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 889.762166] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496507', 'volume_id': '46a8babb-a07c-4277-8a54-ff1a519becfb', 'name': 'volume-46a8babb-a07c-4277-8a54-ff1a519becfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67f05a2b-f323-4e4a-ac13-7f4745593be0', 'attached_at': '', 'detached_at': '', 'volume_id': '46a8babb-a07c-4277-8a54-ff1a519becfb', 'serial': '46a8babb-a07c-4277-8a54-ff1a519becfb'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 889.762303] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe867096-949f-4758-b373-fa04c81cce0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.783467] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ee45b1-9b10-4d26-8bd2-357a3a267240 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.791181] env[62383]: DEBUG oslo_vmware.api [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451838, 'name': PowerOnVM_Task, 'duration_secs': 0.425811} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.804050] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.804300] env[62383]: DEBUG nova.compute.manager [None req-a2f9f45a-c375-4f6c-9d06-ce00bea712ab tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 889.812182] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] volume-46a8babb-a07c-4277-8a54-ff1a519becfb/volume-46a8babb-a07c-4277-8a54-ff1a519becfb.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 889.813037] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804b22cb-12d2-4cd3-8943-50a03ae0b072 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.815883] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-781c68b4-578a-4536-809d-4a46aa05e558 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.831724] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 889.848744] env[62383]: DEBUG oslo_vmware.api [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 889.848744] env[62383]: value = "task-2451842" [ 889.848744] env[62383]: _type = "Task" [ 889.848744] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.857066] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.857373] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Processing image 2d173d61-3079-43d0-8982-7bc84898b336 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 889.857614] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336/2d173d61-3079-43d0-8982-7bc84898b336.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.857756] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "[datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336/2d173d61-3079-43d0-8982-7bc84898b336.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.857929] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 889.858222] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cfd23e23-d7b9-4940-b88e-fd276631df18 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.864427] env[62383]: DEBUG oslo_vmware.api [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451842, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.886242] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 889.886524] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 889.888191] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7560c0d2-bd0f-499e-a67f-47ced1e5bf3c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.898127] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 889.898127] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52724ec4-6abe-d8d3-b5ad-14b9a59e1c5c" [ 889.898127] env[62383]: _type = "Task" [ 889.898127] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.911016] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Releasing lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.911016] env[62383]: DEBUG nova.objects.instance [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lazy-loading 'flavor' on Instance uuid 1e367665-1d4b-4686-ac79-c946423c1762 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.913704] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52724ec4-6abe-d8d3-b5ad-14b9a59e1c5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.919448] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451839, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069867} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.919738] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 889.920671] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6476a6-55a0-4919-ab5a-8425b6b5640c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.947474] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] eedc7859-3882-4837-9419-f9edce5f12fa/eedc7859-3882-4837-9419-f9edce5f12fa.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 889.951006] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55cdf8a9-1199-4775-b80b-3108bbade174 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.972903] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 889.972903] env[62383]: value = "task-2451843" [ 889.972903] env[62383]: _type = "Task" [ 889.972903] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.982740] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451843, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.248247] env[62383]: DEBUG oslo_vmware.api [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451841, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.477249} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.251193] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 890.253018] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 890.253018] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 890.253018] env[62383]: INFO nova.compute.manager [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Took 1.64 seconds to destroy the instance on the hypervisor. [ 890.253018] env[62383]: DEBUG oslo.service.loopingcall [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.253018] env[62383]: DEBUG nova.compute.manager [-] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 890.253018] env[62383]: DEBUG nova.network.neutron [-] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 890.291820] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12403035-e287-4610-b387-73aa4e9f148b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.302912] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bce6b0e-d1d5-4cd3-960c-af36e2c70c0e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.350989] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a611893c-28b9-44a5-a93d-673c539956ba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.372108] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bdf138-8ec7-4a64-b399-a366476aa589 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.376492] env[62383]: DEBUG oslo_vmware.api [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': 
task-2451842, 'name': ReconfigVM_Task, 'duration_secs': 0.500576} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.376881] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Reconfigured VM instance instance-0000002b to attach disk [datastore2] volume-46a8babb-a07c-4277-8a54-ff1a519becfb/volume-46a8babb-a07c-4277-8a54-ff1a519becfb.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 890.383632] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a54dfaef-639e-4fb4-94b6-d3bd052359a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.404949] env[62383]: DEBUG nova.compute.provider_tree [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.416876] env[62383]: DEBUG oslo_vmware.api [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 890.416876] env[62383]: value = "task-2451844" [ 890.416876] env[62383]: _type = "Task" [ 890.416876] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.426983] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Preparing fetch location {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 890.427275] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Fetch image to [datastore2] OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf/OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf.vmdk {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 890.429080] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Downloading stream optimized image 2d173d61-3079-43d0-8982-7bc84898b336 to [datastore2] OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf/OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf.vmdk on the data store datastore2 as vApp {{(pid=62383) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 890.429080] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Downloading image file data 2d173d61-3079-43d0-8982-7bc84898b336 to the ESX as VM named 
'OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf' {{(pid=62383) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 890.430238] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e1cb26-e3b1-42c1-801a-744c49dc0298 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.443021] env[62383]: DEBUG oslo_vmware.api [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451844, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.463230] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 890.466033] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da00f422-55ec-41cc-8f95-0cc2c4406406 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.474162] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 890.474162] env[62383]: value = "task-2451845" [ 890.474162] env[62383]: _type = "Task" [ 890.474162] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.506572] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451843, 'name': ReconfigVM_Task, 'duration_secs': 0.496591} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.509733] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfigured VM instance instance-00000048 to attach disk [datastore2] eedc7859-3882-4837-9419-f9edce5f12fa/eedc7859-3882-4837-9419-f9edce5f12fa.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 890.510445] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451845, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.510692] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c561230-3e76-4285-9dc1-a77cacdfe061 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.518879] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 890.518879] env[62383]: value = "task-2451846" [ 890.518879] env[62383]: _type = "Task" [ 890.518879] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.528764] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451846, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.550779] env[62383]: DEBUG oslo_vmware.rw_handles [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 890.550779] env[62383]: value = "resgroup-9" [ 890.550779] env[62383]: _type = "ResourcePool" [ 890.550779] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 890.551209] env[62383]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a78297e7-3832-4813-a4d2-a950b6917329 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.575838] env[62383]: DEBUG oslo_vmware.rw_handles [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lease: (returnval){ [ 890.575838] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528a2831-b110-94fd-5232-de2411312e9a" [ 890.575838] env[62383]: _type = "HttpNfcLease" [ 890.575838] env[62383]: } obtained for vApp import into resource pool (val){ [ 890.575838] env[62383]: value = "resgroup-9" [ 890.575838] env[62383]: _type = "ResourcePool" [ 890.575838] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 890.576205] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the lease: (returnval){ [ 890.576205] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528a2831-b110-94fd-5232-de2411312e9a" [ 890.576205] env[62383]: _type = "HttpNfcLease" [ 890.576205] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 890.589610] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 890.589610] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528a2831-b110-94fd-5232-de2411312e9a" [ 890.589610] env[62383]: _type = "HttpNfcLease" [ 890.589610] env[62383]: } is initializing. 
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 890.822481] env[62383]: DEBUG nova.compute.manager [req-b31a6a5b-d428-4daa-a2a0-f62c42aa5f6b req-f19d5530-5477-4d5e-b57c-33f44a5894f3 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Received event network-vif-deleted-59b6508c-9775-4b70-8003-690acbbb3e9b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 890.822481] env[62383]: INFO nova.compute.manager [req-b31a6a5b-d428-4daa-a2a0-f62c42aa5f6b req-f19d5530-5477-4d5e-b57c-33f44a5894f3 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Neutron deleted interface 59b6508c-9775-4b70-8003-690acbbb3e9b; detaching it from the instance and deleting it from the info cache [ 890.822481] env[62383]: DEBUG nova.network.neutron [req-b31a6a5b-d428-4daa-a2a0-f62c42aa5f6b req-f19d5530-5477-4d5e-b57c-33f44a5894f3 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.849112] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 890.871452] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 890.871984] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.871984] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 890.872199] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.872199] env[62383]: DEBUG 
nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 890.872318] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 890.872529] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 890.872706] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 890.872874] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 890.873054] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 890.873236] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 890.874086] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b35eae-8a57-407a-be30-0db57a87f38f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.882991] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772025e0-8847-4809-978c-b7885cc74fc0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.914928] env[62383]: DEBUG nova.scheduler.client.report [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 890.932240] env[62383]: DEBUG oslo_vmware.api [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451844, 'name': ReconfigVM_Task, 'duration_secs': 0.17523} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.932573] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496507', 'volume_id': '46a8babb-a07c-4277-8a54-ff1a519becfb', 'name': 'volume-46a8babb-a07c-4277-8a54-ff1a519becfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67f05a2b-f323-4e4a-ac13-7f4745593be0', 'attached_at': '', 'detached_at': '', 'volume_id': '46a8babb-a07c-4277-8a54-ff1a519becfb', 'serial': '46a8babb-a07c-4277-8a54-ff1a519becfb'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 890.989503] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451845, 'name': PowerOffVM_Task, 'duration_secs': 0.231273} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.989735] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 890.995180] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Reconfiguring VM instance instance-00000033 to detach disk 2002 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 890.995510] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afdf987f-79ce-49b6-9485-2f3863b0cc27 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.016722] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 891.016722] env[62383]: value = "task-2451848" [ 891.016722] env[62383]: _type = "Task" [ 891.016722] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.030469] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451846, 'name': Rename_Task, 'duration_secs': 0.201384} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.034284] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 891.034998] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451848, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.034998] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99ce703a-cb4c-4644-84b1-95eee647a5f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.043286] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 891.043286] env[62383]: value = "task-2451849" [ 891.043286] env[62383]: _type = "Task" [ 891.043286] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.056514] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451849, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.085774] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 891.085774] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528a2831-b110-94fd-5232-de2411312e9a" [ 891.085774] env[62383]: _type = "HttpNfcLease" [ 891.085774] env[62383]: } is initializing. 
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 891.130949] env[62383]: DEBUG nova.network.neutron [-] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.271657] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Successfully updated port: 3ac42380-e58c-4b6c-81b1-92660033b445 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 891.329042] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9c8f644-91cd-4bf4-a82b-0be55f388a39 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.351551] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41b983c-1740-45a0-9414-b2c3d1abb1f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.404960] env[62383]: DEBUG nova.compute.manager [req-b31a6a5b-d428-4daa-a2a0-f62c42aa5f6b req-f19d5530-5477-4d5e-b57c-33f44a5894f3 service nova] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Detach interface failed, port_id=59b6508c-9775-4b70-8003-690acbbb3e9b, reason: Instance bc37e114-cf55-408b-9841-05eaf411b4f5 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 891.423082] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.616s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.423655] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 891.426734] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.847s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.428177] env[62383]: INFO nova.compute.claims [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 891.527678] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451848, 'name': ReconfigVM_Task, 'duration_secs': 0.3626} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.527965] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Reconfigured VM instance instance-00000033 to detach disk 2002 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 891.528181] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 891.528441] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-647be5e9-f842-4cfb-8852-d90e386bb8c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.536564] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 891.536564] env[62383]: value = "task-2451850" [ 891.536564] env[62383]: _type = "Task" [ 891.536564] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.547184] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451850, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.557684] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451849, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.587307] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 891.587307] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528a2831-b110-94fd-5232-de2411312e9a" [ 891.587307] env[62383]: _type = "HttpNfcLease" [ 891.587307] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 891.587646] env[62383]: DEBUG oslo_vmware.rw_handles [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 891.587646] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528a2831-b110-94fd-5232-de2411312e9a" [ 891.587646] env[62383]: _type = "HttpNfcLease" [ 891.587646] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 891.588741] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d5abb6-dfc2-43b9-8b25-8b2276621e7d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.597121] env[62383]: DEBUG oslo_vmware.rw_handles [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283cb0f-bb3f-2926-1bbb-d3bf9713a4e3/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 891.597329] env[62383]: DEBUG oslo_vmware.rw_handles [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283cb0f-bb3f-2926-1bbb-d3bf9713a4e3/disk-0.vmdk. {{(pid=62383) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 891.657123] env[62383]: INFO nova.compute.manager [-] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Took 1.40 seconds to deallocate network for instance. 
[ 891.667337] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3873b82c-8906-4571-973e-ad1873b0f60c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.773493] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "refresh_cache-4d58d2e6-171d-4346-b281-bcbd22286623" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.773666] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "refresh_cache-4d58d2e6-171d-4346-b281-bcbd22286623" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.773826] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.934088] env[62383]: DEBUG nova.compute.utils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 891.937583] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 891.938246] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 891.990638] env[62383]: DEBUG nova.policy [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eeedfa485774ec39dd7aba217199d6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd09227ae56ba4875954d0107ae5cf5f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 891.997179] env[62383]: DEBUG nova.objects.instance [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lazy-loading 'flavor' on Instance uuid 67f05a2b-f323-4e4a-ac13-7f4745593be0 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 892.061759] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451850, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.069136] env[62383]: DEBUG oslo_vmware.api [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2451849, 'name': PowerOnVM_Task, 'duration_secs': 0.546061} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.070594] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 892.070914] env[62383]: INFO nova.compute.manager [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Took 8.94 seconds to spawn the instance on the hypervisor. 
[ 892.071126] env[62383]: DEBUG nova.compute.manager [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 892.071979] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92e958d-f3bb-4df3-a5e8-1a6b17d2a09b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.170590] env[62383]: DEBUG oslo_concurrency.lockutils [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.337993] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 892.439392] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 892.515870] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7c5b80d7-be20-4726-ac0d-1d4a162158f7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.553s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.554482] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451850, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.606646] env[62383]: INFO nova.compute.manager [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Took 25.32 seconds to build instance. [ 892.639233] env[62383]: DEBUG oslo_vmware.rw_handles [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Completed reading data from the image iterator. {{(pid=62383) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 892.639496] env[62383]: DEBUG oslo_vmware.rw_handles [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283cb0f-bb3f-2926-1bbb-d3bf9713a4e3/disk-0.vmdk. 
{{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 892.640864] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b8aed1-134d-4c22-8333-d006eeaa2e0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.649073] env[62383]: DEBUG oslo_vmware.rw_handles [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283cb0f-bb3f-2926-1bbb-d3bf9713a4e3/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 892.649462] env[62383]: DEBUG oslo_vmware.rw_handles [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283cb0f-bb3f-2926-1bbb-d3bf9713a4e3/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 892.649551] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-49f71736-5c1d-4192-8dbf-6ecbd4ec70f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.669704] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Successfully created port: 33512c3d-a673-4233-8a74-a0972714fa89 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 892.706757] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Updating instance_info_cache with network_info: [{"id": "3ac42380-e58c-4b6c-81b1-92660033b445", "address": "fa:16:3e:93:3c:10", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ac42380-e5", "ovs_interfaceid": "3ac42380-e58c-4b6c-81b1-92660033b445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.877647] env[62383]: DEBUG nova.compute.manager [req-545d085a-8d93-4b73-87ac-4621104a1d25 
req-942da65e-93f9-46c3-9e75-8601770def7d service nova] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Received event network-vif-plugged-3ac42380-e58c-4b6c-81b1-92660033b445 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 892.878014] env[62383]: DEBUG oslo_concurrency.lockutils [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] Acquiring lock "4d58d2e6-171d-4346-b281-bcbd22286623-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.878128] env[62383]: DEBUG oslo_concurrency.lockutils [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] Lock "4d58d2e6-171d-4346-b281-bcbd22286623-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.878311] env[62383]: DEBUG oslo_concurrency.lockutils [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] Lock "4d58d2e6-171d-4346-b281-bcbd22286623-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.878485] env[62383]: DEBUG nova.compute.manager [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] No waiting events found dispatching network-vif-plugged-3ac42380-e58c-4b6c-81b1-92660033b445 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 892.878652] env[62383]: WARNING nova.compute.manager [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Received unexpected event network-vif-plugged-3ac42380-e58c-4b6c-81b1-92660033b445 for instance with vm_state building and task_state spawning. [ 892.878876] env[62383]: DEBUG nova.compute.manager [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Received event network-changed-3ac42380-e58c-4b6c-81b1-92660033b445 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 892.879147] env[62383]: DEBUG nova.compute.manager [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Refreshing instance network info cache due to event network-changed-3ac42380-e58c-4b6c-81b1-92660033b445. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 892.879861] env[62383]: DEBUG oslo_concurrency.lockutils [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] Acquiring lock "refresh_cache-4d58d2e6-171d-4346-b281-bcbd22286623" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.939980] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662fa5e1-1333-4baf-876d-f8e7f5c7af71 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.952851] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f07b81-ae4d-4ef5-b82a-486b818b5c90 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.992557] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e420e2c5-9168-418b-835f-db589f581fac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.002270] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd573d8c-f9c8-4336-ae15-e83bf6180a73 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.020311] env[62383]: DEBUG nova.compute.provider_tree [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.051527] env[62383]: DEBUG oslo_vmware.api [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451850, 'name': PowerOnVM_Task, 'duration_secs': 1.22198} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.051758] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 893.052139] env[62383]: DEBUG nova.compute.manager [None req-0dc89168-5637-4cb8-8fc1-01e4319eadbc tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 893.053673] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e632072e-0d9e-48ec-9cd1-78917f640c01 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.104452] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5878ce5e-8d64-426c-9c2a-55949e0bea23 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.833s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.165060] env[62383]: DEBUG oslo_vmware.rw_handles [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283cb0f-bb3f-2926-1bbb-d3bf9713a4e3/disk-0.vmdk. 
{{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 893.165252] env[62383]: INFO nova.virt.vmwareapi.images [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Downloaded image file data 2d173d61-3079-43d0-8982-7bc84898b336 [ 893.166521] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab4b495-61bf-49bd-a411-a4f5b4ad3cf5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.189574] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01df5745-d28c-4a40-a0bb-d3a272636f78 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.210124] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "refresh_cache-4d58d2e6-171d-4346-b281-bcbd22286623" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.211543] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Instance network_info: |[{"id": "3ac42380-e58c-4b6c-81b1-92660033b445", "address": "fa:16:3e:93:3c:10", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ac42380-e5", "ovs_interfaceid": "3ac42380-e58c-4b6c-81b1-92660033b445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 893.211543] env[62383]: DEBUG oslo_concurrency.lockutils [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] Acquired lock "refresh_cache-4d58d2e6-171d-4346-b281-bcbd22286623" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.211799] env[62383]: DEBUG nova.network.neutron [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Refreshing network info cache for port 3ac42380-e58c-4b6c-81b1-92660033b445 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 893.212412] 
env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:3c:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ac42380-e58c-4b6c-81b1-92660033b445', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 893.225446] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Creating folder: Project (d09227ae56ba4875954d0107ae5cf5f8). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 893.227695] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c26b7f2-e2d2-4e47-a1de-591856f90314 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.241897] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Created folder: Project (d09227ae56ba4875954d0107ae5cf5f8) in parent group-v496304. [ 893.242133] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Creating folder: Instances. Parent ref: group-v496511. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 893.242396] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f8d1509-fc35-4fab-ac31-ae24bd678f70 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.253983] env[62383]: INFO nova.virt.vmwareapi.images [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] The imported VM was unregistered [ 893.256607] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Caching image {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 893.256878] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating directory with path [datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.257184] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5ce5846-3616-4ea3-9ddd-e8cac98b8e14 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.260665] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Created folder: Instances in parent group-v496511. [ 893.260898] env[62383]: DEBUG oslo.service.loopingcall [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.262158] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 893.262418] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08f31e10-2e93-4a87-8d51-972a4b8c890b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.284453] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Created directory with path [datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.284453] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf/OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf.vmdk to [datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336/2d173d61-3079-43d0-8982-7bc84898b336.vmdk. 
{{(pid=62383) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 893.285448] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-6c5c44b6-1269-457d-90a8-d7c086b0d43d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.288285] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 893.288285] env[62383]: value = "task-2451854" [ 893.288285] env[62383]: _type = "Task" [ 893.288285] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.293831] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 893.293831] env[62383]: value = "task-2451855" [ 893.293831] env[62383]: _type = "Task" [ 893.293831] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.299823] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451854, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.306677] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451855, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.461757] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 893.501361] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 893.501757] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 893.502165] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 893.502370] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 893.502626] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 893.502883] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 893.504242] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 893.504242] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 893.504242] env[62383]: DEBUG nova.virt.hardware [None 
req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 893.504242] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 893.504426] env[62383]: DEBUG nova.virt.hardware [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 893.505597] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd6d294-1baf-4763-be72-b0af3e2590ec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.515754] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0669a03-549f-411d-bac7-b30f09902aa8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.531888] env[62383]: DEBUG nova.scheduler.client.report [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 893.804143] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451854, 'name': CreateVM_Task, 'duration_secs': 0.496044} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.804143] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 893.804143] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.804143] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.804495] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 893.804645] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae227afe-970a-48f0-9ed2-323e32704fcd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.811575] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451855, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.816920] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 893.816920] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d049d2-0bc9-e406-f253-3d42836c494b" [ 893.816920] env[62383]: _type = "Task" [ 893.816920] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.829871] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d049d2-0bc9-e406-f253-3d42836c494b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.989809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "eb632e2d-b71e-446d-83a2-0bab1d823d27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.990594] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "eb632e2d-b71e-446d-83a2-0bab1d823d27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.036977] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.037283] env[62383]: DEBUG nova.compute.manager [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 894.044388] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.620s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.046364] env[62383]: INFO nova.compute.claims [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.143593] env[62383]: DEBUG nova.network.neutron [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Updated VIF entry in instance network info cache for port 3ac42380-e58c-4b6c-81b1-92660033b445. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 894.143593] env[62383]: DEBUG nova.network.neutron [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Updating instance_info_cache with network_info: [{"id": "3ac42380-e58c-4b6c-81b1-92660033b445", "address": "fa:16:3e:93:3c:10", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ac42380-e5", "ovs_interfaceid": "3ac42380-e58c-4b6c-81b1-92660033b445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.306920] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451855, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.331885] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d049d2-0bc9-e406-f253-3d42836c494b, 'name': SearchDatastore_Task, 'duration_secs': 0.095298} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.332479] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.332737] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 894.332985] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.333146] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.333328] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 894.333627] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9127b9fa-a321-441e-a7a9-085572506eed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.352828] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 894.353032] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 894.353853] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9faf0517-aab6-41ae-adb3-ec26819ace11 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.361516] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 894.361516] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5205c67d-95fe-9177-949d-41b0e2f8f104" [ 894.361516] env[62383]: _type = "Task" [ 894.361516] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.371363] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5205c67d-95fe-9177-949d-41b0e2f8f104, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.493903] env[62383]: DEBUG nova.compute.manager [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 894.554678] env[62383]: DEBUG nova.compute.utils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.559445] env[62383]: DEBUG nova.compute.manager [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 894.565563] env[62383]: DEBUG nova.network.neutron [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 894.616535] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Successfully updated port: 33512c3d-a673-4233-8a74-a0972714fa89 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.632404] env[62383]: DEBUG nova.policy [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4de9dec9c1d2474eb611f4a2623d602d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aead8ea1d1de4d0d8d8c07dec519d8b4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 894.645275] env[62383]: DEBUG oslo_concurrency.lockutils [req-545d085a-8d93-4b73-87ac-4621104a1d25 req-942da65e-93f9-46c3-9e75-8601770def7d service nova] Releasing lock "refresh_cache-4d58d2e6-171d-4346-b281-bcbd22286623" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.719907] env[62383]: DEBUG nova.compute.manager [req-6d0e1407-7419-4fc3-a99c-7fa21436bfc9 req-daa340ae-37c1-4f3d-b6fb-abc976ce3074 service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Received event network-changed-6ba23e44-2c77-442d-9aee-5a75d8abab68 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 894.720429] env[62383]: DEBUG nova.compute.manager [req-6d0e1407-7419-4fc3-a99c-7fa21436bfc9 req-daa340ae-37c1-4f3d-b6fb-abc976ce3074 service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Refreshing instance network info cache due to event network-changed-6ba23e44-2c77-442d-9aee-5a75d8abab68. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 894.720676] env[62383]: DEBUG oslo_concurrency.lockutils [req-6d0e1407-7419-4fc3-a99c-7fa21436bfc9 req-daa340ae-37c1-4f3d-b6fb-abc976ce3074 service nova] Acquiring lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.720827] env[62383]: DEBUG oslo_concurrency.lockutils [req-6d0e1407-7419-4fc3-a99c-7fa21436bfc9 req-daa340ae-37c1-4f3d-b6fb-abc976ce3074 service nova] Acquired lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.720999] env[62383]: DEBUG nova.network.neutron [req-6d0e1407-7419-4fc3-a99c-7fa21436bfc9 req-daa340ae-37c1-4f3d-b6fb-abc976ce3074 service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Refreshing network info cache for port 6ba23e44-2c77-442d-9aee-5a75d8abab68 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 894.817277] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451855, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.874558] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5205c67d-95fe-9177-949d-41b0e2f8f104, 'name': SearchDatastore_Task, 'duration_secs': 0.089197} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.875432] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d8c866e-0d4f-4930-9627-139654c1b0a7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.884772] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 894.884772] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a8c5ea-506a-517a-6323-d712578cfd40" [ 894.884772] env[62383]: _type = "Task" [ 894.884772] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.897240] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a8c5ea-506a-517a-6323-d712578cfd40, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.010838] env[62383]: DEBUG nova.network.neutron [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Successfully created port: 219a19a2-eb69-4683-81ac-a79596cb28f3 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.021237] env[62383]: DEBUG nova.compute.manager [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Received event network-vif-plugged-33512c3d-a673-4233-8a74-a0972714fa89 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 895.021460] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] Acquiring lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.021625] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] Lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.021782] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] Lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.022410] env[62383]: DEBUG nova.compute.manager [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] No waiting events found dispatching network-vif-plugged-33512c3d-a673-4233-8a74-a0972714fa89 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 895.022622] env[62383]: WARNING nova.compute.manager [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Received unexpected event network-vif-plugged-33512c3d-a673-4233-8a74-a0972714fa89 for instance with vm_state building and task_state spawning. [ 895.022798] env[62383]: DEBUG nova.compute.manager [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Received event network-changed-33512c3d-a673-4233-8a74-a0972714fa89 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 895.023243] env[62383]: DEBUG nova.compute.manager [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Refreshing instance network info cache due to event network-changed-33512c3d-a673-4233-8a74-a0972714fa89. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 895.023312] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] Acquiring lock "refresh_cache-b9669bb8-680f-492a-a7c6-82e6edb0a8ed" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.023402] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] Acquired lock "refresh_cache-b9669bb8-680f-492a-a7c6-82e6edb0a8ed" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.023560] env[62383]: DEBUG nova.network.neutron [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Refreshing network info cache for port 33512c3d-a673-4233-8a74-a0972714fa89 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.029934] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.072967] env[62383]: DEBUG nova.compute.manager [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 895.121315] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "refresh_cache-b9669bb8-680f-492a-a7c6-82e6edb0a8ed" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.317372] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451855, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.403930] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a8c5ea-506a-517a-6323-d712578cfd40, 'name': SearchDatastore_Task, 'duration_secs': 0.096536} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.406908] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.408067] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4d58d2e6-171d-4346-b281-bcbd22286623/4d58d2e6-171d-4346-b281-bcbd22286623.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 895.408067] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d64d3e2b-5673-4e6c-b043-b7d1f86f6989 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.423267] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 895.423267] env[62383]: value = "task-2451856" [ 895.423267] env[62383]: _type = "Task" [ 895.423267] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.441063] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451856, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.586574] env[62383]: DEBUG nova.network.neutron [req-6d0e1407-7419-4fc3-a99c-7fa21436bfc9 req-daa340ae-37c1-4f3d-b6fb-abc976ce3074 service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updated VIF entry in instance network info cache for port 6ba23e44-2c77-442d-9aee-5a75d8abab68. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 895.587088] env[62383]: DEBUG nova.network.neutron [req-6d0e1407-7419-4fc3-a99c-7fa21436bfc9 req-daa340ae-37c1-4f3d-b6fb-abc976ce3074 service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance_info_cache with network_info: [{"id": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "address": "fa:16:3e:7f:55:d3", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba23e44-2c", "ovs_interfaceid": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.615177] env[62383]: DEBUG nova.network.neutron [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.639398] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375d71bf-310e-48ba-8734-ff418d71c86d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.655028] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc4cb14-571f-470e-b61d-6e66c20e1d9e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.690083] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4504119-6b76-472e-8a53-7c6c69494d2c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.701263] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f12ce1-2c82-4f7e-bc51-ce164f7dd71a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.717958] env[62383]: DEBUG nova.compute.provider_tree [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.813304] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451855, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.875272] env[62383]: DEBUG nova.network.neutron [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.936041] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451856, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.095657] env[62383]: DEBUG nova.compute.manager [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 896.098570] env[62383]: DEBUG oslo_concurrency.lockutils [req-6d0e1407-7419-4fc3-a99c-7fa21436bfc9 req-daa340ae-37c1-4f3d-b6fb-abc976ce3074 service nova] Releasing lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.134076] env[62383]: DEBUG nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 896.134076] env[62383]: DEBUG nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.134076] env[62383]: DEBUG nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 896.134286] env[62383]: DEBUG nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.134286] env[62383]: DEBUG nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 896.134286] env[62383]: DEBUG nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 896.134286] env[62383]: DEBUG nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 896.134286] env[62383]: DEBUG 
nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 896.134434] env[62383]: DEBUG nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 896.134434] env[62383]: DEBUG nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 896.134434] env[62383]: DEBUG nova.virt.hardware [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 896.134434] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3a7601-0101-4dc3-9cad-96da376c95ea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.143256] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d45d66-3b7e-42e6-815d-90ea47eae513 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.223107] env[62383]: DEBUG nova.scheduler.client.report [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 896.317696] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451855, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.379116] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc8ab625-39e9-43a0-9f67-cf77bea17def req-6b045b2c-1410-48b5-9441-4d318d947e46 service nova] Releasing lock "refresh_cache-b9669bb8-680f-492a-a7c6-82e6edb0a8ed" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.380015] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "refresh_cache-b9669bb8-680f-492a-a7c6-82e6edb0a8ed" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.380015] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 896.436948] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451856, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.674340] env[62383]: DEBUG nova.network.neutron [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Successfully updated port: 219a19a2-eb69-4683-81ac-a79596cb28f3 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.733566] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.689s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.734966] env[62383]: DEBUG nova.compute.manager [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 896.740170] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.717s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.743384] env[62383]: INFO nova.compute.claims [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.753377] env[62383]: DEBUG nova.compute.manager [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Received event network-changed-e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 896.753377] env[62383]: DEBUG nova.compute.manager [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Refreshing instance network info cache due to event network-changed-e09c6085-476c-4c95-a6e0-1175a4786e4d. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 896.753569] env[62383]: DEBUG oslo_concurrency.lockutils [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] Acquiring lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.753711] env[62383]: DEBUG oslo_concurrency.lockutils [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] Acquired lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.753879] env[62383]: DEBUG nova.network.neutron [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Refreshing network info cache for port e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.815144] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451855, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.087101} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.815483] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf/OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf.vmdk to [datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336/2d173d61-3079-43d0-8982-7bc84898b336.vmdk. 
[ 896.815706] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Cleaning up location [datastore2] OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 896.815929] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_98006a1a-0ef8-4594-9257-c54349bd9cbf {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 896.816557] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-021c9667-8687-4be3-9279-6a81db8709a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.831870] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 896.831870] env[62383]: value = "task-2451857" [ 896.831870] env[62383]: _type = "Task" [ 896.831870] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.842498] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451857, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.920288] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 896.947890] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451856, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.071426] env[62383]: DEBUG nova.compute.manager [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Received event network-vif-plugged-219a19a2-eb69-4683-81ac-a79596cb28f3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 897.071634] env[62383]: DEBUG oslo_concurrency.lockutils [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] Acquiring lock "23d24da6-c7d8-4d6a-8442-a1066505aab1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.071833] env[62383]: DEBUG oslo_concurrency.lockutils [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.071998] env[62383]: DEBUG oslo_concurrency.lockutils [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.073155] env[62383]: DEBUG nova.compute.manager [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] No waiting events found dispatching network-vif-plugged-219a19a2-eb69-4683-81ac-a79596cb28f3 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 897.073435] env[62383]: WARNING nova.compute.manager [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Received unexpected event network-vif-plugged-219a19a2-eb69-4683-81ac-a79596cb28f3 for instance with vm_state building and task_state spawning. [ 897.073699] env[62383]: DEBUG nova.compute.manager [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Received event network-changed-219a19a2-eb69-4683-81ac-a79596cb28f3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 897.073894] env[62383]: DEBUG nova.compute.manager [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Refreshing instance network info cache due to event network-changed-219a19a2-eb69-4683-81ac-a79596cb28f3. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 897.075508] env[62383]: DEBUG oslo_concurrency.lockutils [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] Acquiring lock "refresh_cache-23d24da6-c7d8-4d6a-8442-a1066505aab1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.075508] env[62383]: DEBUG oslo_concurrency.lockutils [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] Acquired lock "refresh_cache-23d24da6-c7d8-4d6a-8442-a1066505aab1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.075508] env[62383]: DEBUG nova.network.neutron [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Refreshing network info cache for port 219a19a2-eb69-4683-81ac-a79596cb28f3 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 897.078601] env[62383]: DEBUG nova.network.neutron [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Updating instance_info_cache with network_info: [{"id": "33512c3d-a673-4233-8a74-a0972714fa89", "address": "fa:16:3e:7d:7c:6d", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33512c3d-a6", "ovs_interfaceid": "33512c3d-a673-4233-8a74-a0972714fa89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.180034] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "refresh_cache-23d24da6-c7d8-4d6a-8442-a1066505aab1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.250512] env[62383]: DEBUG nova.compute.utils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.251898] env[62383]: DEBUG nova.compute.manager [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 
tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 897.253224] env[62383]: DEBUG nova.network.neutron [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 897.299392] env[62383]: DEBUG nova.policy [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52222401600845bcb88d02f000771658', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2b77864c75943b4a625276225c3aac9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 897.345810] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.266878} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.346291] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 897.346497] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "[datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336/2d173d61-3079-43d0-8982-7bc84898b336.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.347048] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336/2d173d61-3079-43d0-8982-7bc84898b336.vmdk to [datastore2] 506afe7c-f19b-4417-b097-485c0244a019/506afe7c-f19b-4417-b097-485c0244a019.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 897.347383] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-938d461a-e157-4e26-a326-a2cb8ed72bca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.364354] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 897.364354] env[62383]: value = "task-2451858" [ 897.364354] 
env[62383]: _type = "Task" [ 897.364354] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.441360] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451856, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.671697} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.443883] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4d58d2e6-171d-4346-b281-bcbd22286623/4d58d2e6-171d-4346-b281-bcbd22286623.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 897.445728] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 897.446098] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e49a939d-537b-462b-99d6-f12aba379db0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.461300] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 897.461300] env[62383]: value = "task-2451859" [ 897.461300] env[62383]: _type = "Task" [ 897.461300] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.474940] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451859, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.584588] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "refresh_cache-b9669bb8-680f-492a-a7c6-82e6edb0a8ed" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.584932] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Instance network_info: |[{"id": "33512c3d-a673-4233-8a74-a0972714fa89", "address": "fa:16:3e:7d:7c:6d", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33512c3d-a6", "ovs_interfaceid": "33512c3d-a673-4233-8a74-a0972714fa89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 897.585378] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:7c:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33512c3d-a673-4233-8a74-a0972714fa89', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 897.595309] env[62383]: DEBUG oslo.service.loopingcall [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 897.595968] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 897.596230] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4dc59b19-f59a-4fed-badf-81ce1a3258e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.615738] env[62383]: DEBUG nova.network.neutron [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.624479] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.624479] env[62383]: value = "task-2451860" [ 897.624479] env[62383]: _type = "Task" [ 897.624479] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.637230] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451860, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.713632] env[62383]: DEBUG nova.network.neutron [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Successfully created port: 387d5009-0a75-4cba-9ab0-bbdbd5398119 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.732037] env[62383]: DEBUG nova.network.neutron [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.757383] env[62383]: DEBUG nova.compute.manager [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 897.877468] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451858, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.971724] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451859, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.278454} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.972102] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.975763] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468e5b05-ced0-42ee-8917-b045ca9b4037 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.000853] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 4d58d2e6-171d-4346-b281-bcbd22286623/4d58d2e6-171d-4346-b281-bcbd22286623.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.007231] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ff8e786-49af-4fbc-9b42-5de2cc1050c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.032993] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 898.032993] env[62383]: value = "task-2451861" [ 898.032993] env[62383]: _type = "Task" [ 898.032993] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.047125] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451861, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.135550] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451860, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.136911] env[62383]: DEBUG nova.network.neutron [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updated VIF entry in instance network info cache for port e09c6085-476c-4c95-a6e0-1175a4786e4d. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 898.137438] env[62383]: DEBUG nova.network.neutron [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updating instance_info_cache with network_info: [{"id": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "address": "fa:16:3e:24:0a:9f", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09c6085-47", "ovs_interfaceid": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.237287] env[62383]: DEBUG oslo_concurrency.lockutils [req-305b0b1f-3baa-480c-98be-9e612365161b req-602be59f-f378-41a0-8e78-cdee30c9259a service nova] Releasing lock "refresh_cache-23d24da6-c7d8-4d6a-8442-a1066505aab1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.237287] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "refresh_cache-23d24da6-c7d8-4d6a-8442-a1066505aab1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.237287] env[62383]: DEBUG nova.network.neutron [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.252395] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26d3b37-15b6-4058-8215-218b7ff0928a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.274931] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b3c97e-e4cf-4ed8-944b-8e9608ac2b17 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.319779] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041d3837-c5b9-4fbd-bffc-c0ad85c9f238 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.330346] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd34132-01aa-446e-8561-dab823ec0653 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.347758] env[62383]: DEBUG nova.compute.provider_tree [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.376592] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451858, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.548033] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.636314] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451860, 'name': CreateVM_Task, 'duration_secs': 0.749506} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.636546] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 898.637336] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.637585] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.637936] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 898.638239] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9be01a41-c56e-4b14-9fee-8dcffdd56e8f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.645779] env[62383]: DEBUG oslo_concurrency.lockutils [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a 
req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] Releasing lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.646079] env[62383]: DEBUG nova.compute.manager [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Received event network-changed-e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 898.646298] env[62383]: DEBUG nova.compute.manager [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Refreshing instance network info cache due to event network-changed-e09c6085-476c-4c95-a6e0-1175a4786e4d. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 898.646571] env[62383]: DEBUG oslo_concurrency.lockutils [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] Acquiring lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.646731] env[62383]: DEBUG oslo_concurrency.lockutils [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] Acquired lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.646922] env[62383]: DEBUG nova.network.neutron [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Refreshing network info cache for port e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.649044] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 898.649044] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e33b23-ed04-a1d1-f23b-64bef6abf6cb" [ 898.649044] env[62383]: _type = "Task" [ 898.649044] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.660121] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e33b23-ed04-a1d1-f23b-64bef6abf6cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.775114] env[62383]: DEBUG nova.compute.manager [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 898.786584] env[62383]: DEBUG nova.network.neutron [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.816571] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 898.816947] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.819133] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 898.819462] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.819547] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 898.819696] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 898.819913] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 898.820106] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 898.822019] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 898.822019] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 898.822019] env[62383]: DEBUG nova.virt.hardware [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 898.822019] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edad2bf-a35c-45eb-a9f4-d99ceaffc7d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.834414] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6c8962-0457-4a63-a111-19e205b9f70c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.852577] env[62383]: DEBUG nova.scheduler.client.report [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.881223] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451858, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.052779] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451861, 'name': ReconfigVM_Task, 'duration_secs': 0.861568} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.053253] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 4d58d2e6-171d-4346-b281-bcbd22286623/4d58d2e6-171d-4346-b281-bcbd22286623.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.053954] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3df9a2b-db4a-4043-af81-ecb1d778a087 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.066727] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 899.066727] env[62383]: value = "task-2451862" [ 899.066727] env[62383]: _type = "Task" [ 899.066727] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.084249] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451862, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.165276] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e33b23-ed04-a1d1-f23b-64bef6abf6cb, 'name': SearchDatastore_Task, 'duration_secs': 0.090704} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.165704] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.166064] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.166397] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.166559] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.166765] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.167130] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2329fa3-0ba9-4565-ab09-c3a26645f31c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.170648] env[62383]: DEBUG nova.network.neutron [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Updating instance_info_cache with network_info: [{"id": "219a19a2-eb69-4683-81ac-a79596cb28f3", "address": "fa:16:3e:95:e0:e6", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": 
"nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap219a19a2-eb", "ovs_interfaceid": "219a19a2-eb69-4683-81ac-a79596cb28f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.188078] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.188294] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.189231] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bde9412f-8ce8-44d3-b2b5-219684f13065 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.198448] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 899.198448] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526adb04-a26d-d119-9055-5641157f9383" [ 899.198448] env[62383]: _type = "Task" [ 899.198448] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.209584] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]526adb04-a26d-d119-9055-5641157f9383, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.226289] env[62383]: DEBUG nova.compute.manager [req-22e7e2c1-6de2-41a3-8c3f-09ea4a381141 req-7ce6c059-7c29-4dec-ab63-e12b04b8c3b0 service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Received event network-vif-plugged-387d5009-0a75-4cba-9ab0-bbdbd5398119 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 899.226289] env[62383]: DEBUG oslo_concurrency.lockutils [req-22e7e2c1-6de2-41a3-8c3f-09ea4a381141 req-7ce6c059-7c29-4dec-ab63-e12b04b8c3b0 service nova] Acquiring lock "0f6b7094-27a0-4e97-98ac-bff857124b6c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.226289] env[62383]: DEBUG oslo_concurrency.lockutils [req-22e7e2c1-6de2-41a3-8c3f-09ea4a381141 req-7ce6c059-7c29-4dec-ab63-e12b04b8c3b0 service nova] Lock "0f6b7094-27a0-4e97-98ac-bff857124b6c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.226289] env[62383]: DEBUG oslo_concurrency.lockutils [req-22e7e2c1-6de2-41a3-8c3f-09ea4a381141 req-7ce6c059-7c29-4dec-ab63-e12b04b8c3b0 service nova] Lock "0f6b7094-27a0-4e97-98ac-bff857124b6c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.226289] env[62383]: DEBUG nova.compute.manager [req-22e7e2c1-6de2-41a3-8c3f-09ea4a381141 req-7ce6c059-7c29-4dec-ab63-e12b04b8c3b0 service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] No waiting events found dispatching network-vif-plugged-387d5009-0a75-4cba-9ab0-bbdbd5398119 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 899.226487] env[62383]: WARNING nova.compute.manager [req-22e7e2c1-6de2-41a3-8c3f-09ea4a381141 req-7ce6c059-7c29-4dec-ab63-e12b04b8c3b0 service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Received unexpected event network-vif-plugged-387d5009-0a75-4cba-9ab0-bbdbd5398119 for instance with vm_state building and task_state spawning. [ 899.358919] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.620s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.359557] env[62383]: DEBUG nova.compute.manager [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 899.364700] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.637s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.367957] env[62383]: INFO nova.compute.claims [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.391188] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451858, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.405853] env[62383]: DEBUG nova.network.neutron [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Successfully updated port: 387d5009-0a75-4cba-9ab0-bbdbd5398119 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.528770] env[62383]: DEBUG nova.network.neutron [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updated VIF entry in instance network info cache for port e09c6085-476c-4c95-a6e0-1175a4786e4d. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 899.529339] env[62383]: DEBUG nova.network.neutron [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updating instance_info_cache with network_info: [{"id": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "address": "fa:16:3e:24:0a:9f", "network": {"id": "c69cd1de-2f22-4327-b936-2410e4165491", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-11143258-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bed29fa2bc64a31b3324d7d0d01c61d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09c6085-47", "ovs_interfaceid": "e09c6085-476c-4c95-a6e0-1175a4786e4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.579377] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451862, 'name': Rename_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.677799] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "refresh_cache-23d24da6-c7d8-4d6a-8442-a1066505aab1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.678188] env[62383]: DEBUG nova.compute.manager [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Instance network_info: |[{"id": "219a19a2-eb69-4683-81ac-a79596cb28f3", "address": "fa:16:3e:95:e0:e6", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap219a19a2-eb", "ovs_interfaceid": "219a19a2-eb69-4683-81ac-a79596cb28f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 899.678752] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:e0:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '219a19a2-eb69-4683-81ac-a79596cb28f3', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.687965] env[62383]: DEBUG oslo.service.loopingcall [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 899.688269] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 899.688529] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a564e53d-f9b5-4258-83aa-665df28ecadd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.714561] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]526adb04-a26d-d119-9055-5641157f9383, 'name': SearchDatastore_Task, 'duration_secs': 0.091788} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.716599] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.716599] env[62383]: value = "task-2451863" [ 899.716599] env[62383]: _type = "Task" [ 899.716599] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.716863] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2808db78-0e0f-4f94-8d74-5deef18e0f21 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.728998] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451863, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.730130] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 899.730130] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d3bf8f-d376-6c24-c7e9-088cf1a2596c" [ 899.730130] env[62383]: _type = "Task" [ 899.730130] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.738861] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d3bf8f-d376-6c24-c7e9-088cf1a2596c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.874927] env[62383]: DEBUG nova.compute.utils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 899.876635] env[62383]: DEBUG nova.compute.manager [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 899.876942] env[62383]: DEBUG nova.network.neutron [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 899.892404] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451858, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.912423] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "refresh_cache-0f6b7094-27a0-4e97-98ac-bff857124b6c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.912423] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired lock "refresh_cache-0f6b7094-27a0-4e97-98ac-bff857124b6c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.912423] env[62383]: DEBUG nova.network.neutron [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.922364] env[62383]: DEBUG nova.policy [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5aabb0bddb354368bca91987b80ddff1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a9dddf0be63492aa59c78d5bcddd9fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 900.035183] env[62383]: DEBUG oslo_concurrency.lockutils [req-63f2c721-9d6b-45fd-8bb4-ff8d36e78b2a req-28d3d383-5b5c-4908-92fc-a109b0758245 service nova] Releasing lock "refresh_cache-1e367665-1d4b-4686-ac79-c946423c1762" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.081095] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451862, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.234519] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451863, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.235066] env[62383]: DEBUG nova.network.neutron [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Successfully created port: e6fce712-4e28-4f58-ad16-497490c0dded {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.247746] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d3bf8f-d376-6c24-c7e9-088cf1a2596c, 'name': SearchDatastore_Task, 'duration_secs': 0.096064} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.248014] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.248287] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] b9669bb8-680f-492a-a7c6-82e6edb0a8ed/b9669bb8-680f-492a-a7c6-82e6edb0a8ed.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.248557] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-787ae19a-fd74-4600-a24f-9009ec7cf0e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.256531] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 900.256531] env[62383]: value = "task-2451864" [ 900.256531] env[62383]: _type = "Task" [ 900.256531] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.266297] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451864, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.384503] env[62383]: DEBUG nova.compute.manager [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 900.394347] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451858, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.470580] env[62383]: DEBUG nova.network.neutron [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.579488] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451862, 'name': Rename_Task, 'duration_secs': 1.387829} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.582750] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 900.586213] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7bf0fc0-9915-4357-95cc-81ae4acbf684 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.598351] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 900.598351] env[62383]: value = "task-2451865" [ 900.598351] env[62383]: _type = "Task" [ 900.598351] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.609812] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451865, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.682725] env[62383]: DEBUG nova.network.neutron [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Updating instance_info_cache with network_info: [{"id": "387d5009-0a75-4cba-9ab0-bbdbd5398119", "address": "fa:16:3e:ba:36:63", "network": {"id": "3ae77c57-8c24-4aba-943f-e5e0c2471a86", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-404472524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b77864c75943b4a625276225c3aac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap387d5009-0a", "ovs_interfaceid": "387d5009-0a75-4cba-9ab0-bbdbd5398119", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.733247] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451863, 'name': CreateVM_Task, 'duration_secs': 0.95259} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.735401] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 900.736514] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.736641] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.736957] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 900.737230] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5a1f58f-95ee-452b-b986-5bed6879afb5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.742280] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 900.742280] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5267604b-2db4-4bed-e9e8-48b9d5e111a2" [ 900.742280] env[62383]: _type = "Task" [ 900.742280] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.753067] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5267604b-2db4-4bed-e9e8-48b9d5e111a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.769602] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451864, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.833412] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d5dd79-edf0-4f9b-943f-e5f0f07300cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.842201] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f919b013-1221-4f8b-a385-22da7aa0cb5c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.875332] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6135cecf-f388-4b46-8be0-d57cd52754dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.889559] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3445b4-4d34-4065-828d-d4959f6a4be3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.893444] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451858, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.211921} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.893662] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/2d173d61-3079-43d0-8982-7bc84898b336/2d173d61-3079-43d0-8982-7bc84898b336.vmdk to [datastore2] 506afe7c-f19b-4417-b097-485c0244a019/506afe7c-f19b-4417-b097-485c0244a019.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 900.894826] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521ea19f-c27c-48a8-8857-a1af754a9fd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.908141] env[62383]: DEBUG nova.compute.provider_tree [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.930421] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 506afe7c-f19b-4417-b097-485c0244a019/506afe7c-f19b-4417-b097-485c0244a019.vmdk or device None with type streamOptimized {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 900.931340] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d71cbf91-0e09-4dec-ae68-5d44e0979625 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.952667] env[62383]: DEBUG 
oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 900.952667] env[62383]: value = "task-2451866" [ 900.952667] env[62383]: _type = "Task" [ 900.952667] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.963244] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451866, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.107698] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451865, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.187042] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Releasing lock "refresh_cache-0f6b7094-27a0-4e97-98ac-bff857124b6c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.187437] env[62383]: DEBUG nova.compute.manager [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Instance network_info: |[{"id": "387d5009-0a75-4cba-9ab0-bbdbd5398119", "address": "fa:16:3e:ba:36:63", "network": {"id": "3ae77c57-8c24-4aba-943f-e5e0c2471a86", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-404472524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b77864c75943b4a625276225c3aac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap387d5009-0a", "ovs_interfaceid": "387d5009-0a75-4cba-9ab0-bbdbd5398119", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 901.187874] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:36:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '387d5009-0a75-4cba-9ab0-bbdbd5398119', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.195438] env[62383]: DEBUG oslo.service.loopingcall [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.195634] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 901.195868] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f127b6b3-5559-4a4f-9d89-6efdd28e249a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.217042] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.217042] env[62383]: value = "task-2451867" [ 901.217042] env[62383]: _type = "Task" [ 901.217042] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.225778] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451867, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.249572] env[62383]: DEBUG nova.compute.manager [req-a2426420-5417-42c2-bb78-d67ce6fa27d9 req-fde75a90-b75f-48fe-9213-adc61061bf0f service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Received event network-changed-387d5009-0a75-4cba-9ab0-bbdbd5398119 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 901.249682] env[62383]: DEBUG nova.compute.manager [req-a2426420-5417-42c2-bb78-d67ce6fa27d9 req-fde75a90-b75f-48fe-9213-adc61061bf0f service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Refreshing instance network info cache due to event network-changed-387d5009-0a75-4cba-9ab0-bbdbd5398119. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 901.250086] env[62383]: DEBUG oslo_concurrency.lockutils [req-a2426420-5417-42c2-bb78-d67ce6fa27d9 req-fde75a90-b75f-48fe-9213-adc61061bf0f service nova] Acquiring lock "refresh_cache-0f6b7094-27a0-4e97-98ac-bff857124b6c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.250127] env[62383]: DEBUG oslo_concurrency.lockutils [req-a2426420-5417-42c2-bb78-d67ce6fa27d9 req-fde75a90-b75f-48fe-9213-adc61061bf0f service nova] Acquired lock "refresh_cache-0f6b7094-27a0-4e97-98ac-bff857124b6c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.250294] env[62383]: DEBUG nova.network.neutron [req-a2426420-5417-42c2-bb78-d67ce6fa27d9 req-fde75a90-b75f-48fe-9213-adc61061bf0f service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Refreshing network info cache for port 387d5009-0a75-4cba-9ab0-bbdbd5398119 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.260715] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5267604b-2db4-4bed-e9e8-48b9d5e111a2, 'name': SearchDatastore_Task, 'duration_secs': 0.06165} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.264601] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.264929] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 901.265261] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.265412] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.265591] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 901.266224] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b0cbad6-34d7-485c-be56-1f107b4f394d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.277175] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451864, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.279524] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 901.279711] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 901.280926] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b0e3897-2036-428c-a49f-86d8c9e643db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.288895] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 901.288895] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526082b2-231a-8b07-843d-480958193040" [ 901.288895] env[62383]: _type = "Task" [ 901.288895] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.298426] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]526082b2-231a-8b07-843d-480958193040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.411253] env[62383]: DEBUG nova.compute.manager [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 901.414643] env[62383]: DEBUG nova.scheduler.client.report [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 901.446651] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 901.446963] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.447147] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 901.447341] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.447546] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 901.447705] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 901.447973] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 901.448181] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 901.448396] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 901.448617] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 901.448880] env[62383]: DEBUG nova.virt.hardware [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 901.450388] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9dbacaa-6b72-4c8a-bd66-dcb56792cd41 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.468060] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6536785a-c8cf-4acb-8448-7f849cc6cac8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.472779] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451866, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.609413] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451865, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.729583] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451867, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.775312] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451864, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.371342} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.775312] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] b9669bb8-680f-492a-a7c6-82e6edb0a8ed/b9669bb8-680f-492a-a7c6-82e6edb0a8ed.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 901.775312] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.775312] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e535fd4-be0b-40ad-99a4-68c710e81842 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.788043] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 901.788043] env[62383]: value = "task-2451868" [ 901.788043] env[62383]: _type = "Task" [ 901.788043] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.805217] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451868, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.805543] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]526082b2-231a-8b07-843d-480958193040, 'name': SearchDatastore_Task, 'duration_secs': 0.019469} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.806503] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5151cb8-41ef-4b79-9e0f-15547ac447f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.812810] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 901.812810] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5266b7a5-6eb4-12f4-ed50-48cac28ff432" [ 901.812810] env[62383]: _type = "Task" [ 901.812810] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.823379] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5266b7a5-6eb4-12f4-ed50-48cac28ff432, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.921012] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.921625] env[62383]: DEBUG nova.compute.manager [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 901.928021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 21.194s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.943935] env[62383]: DEBUG nova.network.neutron [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Successfully updated port: e6fce712-4e28-4f58-ad16-497490c0dded {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 901.967884] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451866, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.039169] env[62383]: DEBUG nova.network.neutron [req-a2426420-5417-42c2-bb78-d67ce6fa27d9 req-fde75a90-b75f-48fe-9213-adc61061bf0f service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Updated VIF entry in instance network info cache for port 387d5009-0a75-4cba-9ab0-bbdbd5398119. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 902.039572] env[62383]: DEBUG nova.network.neutron [req-a2426420-5417-42c2-bb78-d67ce6fa27d9 req-fde75a90-b75f-48fe-9213-adc61061bf0f service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Updating instance_info_cache with network_info: [{"id": "387d5009-0a75-4cba-9ab0-bbdbd5398119", "address": "fa:16:3e:ba:36:63", "network": {"id": "3ae77c57-8c24-4aba-943f-e5e0c2471a86", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-404472524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f2b77864c75943b4a625276225c3aac9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaf3dfa2-fa01-4d4d-8ecd-a9bc74d90ec2", "external-id": "nsx-vlan-transportzone-546", "segmentation_id": 546, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap387d5009-0a", "ovs_interfaceid": "387d5009-0a75-4cba-9ab0-bbdbd5398119", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.107896] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451865, 'name': PowerOnVM_Task, 'duration_secs': 1.10567} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.108172] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 902.108377] env[62383]: INFO nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Took 11.26 seconds to spawn the instance on the hypervisor. 
[ 902.108556] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 902.109331] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e044ed-1c78-4f31-a493-7110acc42371 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.228722] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451867, 'name': CreateVM_Task, 'duration_secs': 0.565652} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.228901] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 902.229596] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.229770] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.230198] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 902.230445] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24826aec-1d32-4c8d-80b2-5a6e86c44cb2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.235409] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 902.235409] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524a8cc7-2a94-e106-b1b7-203488ac780e" [ 902.235409] env[62383]: _type = "Task" [ 902.235409] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.242982] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524a8cc7-2a94-e106-b1b7-203488ac780e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.293328] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451868, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.293572] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.294304] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4607b85-fe4d-49b9-95e0-83b19deb9388 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.315233] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] b9669bb8-680f-492a-a7c6-82e6edb0a8ed/b9669bb8-680f-492a-a7c6-82e6edb0a8ed.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.315566] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a1383c0-e96f-40d1-adfc-5e2fd6637fec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.337597] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5266b7a5-6eb4-12f4-ed50-48cac28ff432, 'name': SearchDatastore_Task, 'duration_secs': 0.02744} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.337814] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.338088] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 23d24da6-c7d8-4d6a-8442-a1066505aab1/23d24da6-c7d8-4d6a-8442-a1066505aab1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 902.338475] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8dcc38e9-c2ab-4432-845d-ce72e50a6f76 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.343412] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 902.343412] env[62383]: value = "task-2451869" [ 902.343412] env[62383]: _type = "Task" [ 902.343412] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.348024] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 902.348024] env[62383]: value = "task-2451870" [ 902.348024] env[62383]: _type = "Task" [ 902.348024] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.354427] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451869, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.360068] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451870, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.430672] env[62383]: INFO nova.compute.claims [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.435211] env[62383]: DEBUG nova.compute.utils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 902.436573] env[62383]: DEBUG nova.compute.manager [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 902.436739] env[62383]: DEBUG nova.network.neutron [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 902.447617] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquiring lock "refresh_cache-188d6b20-3dca-4c1c-8271-1871d2c992d5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.447826] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquired lock "refresh_cache-188d6b20-3dca-4c1c-8271-1871d2c992d5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.447999] env[62383]: DEBUG nova.network.neutron [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 902.463803] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451866, 'name': ReconfigVM_Task, 'duration_secs': 1.051511} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.464104] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 506afe7c-f19b-4417-b097-485c0244a019/506afe7c-f19b-4417-b097-485c0244a019.vmdk or device None with type streamOptimized {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.464800] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f17842b2-3eef-416b-a4d8-c0581e95dae1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.474059] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 902.474059] env[62383]: value = "task-2451871" [ 902.474059] env[62383]: _type = "Task" [ 902.474059] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.476676] env[62383]: DEBUG nova.policy [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec3589360ad54088ad4151a82febcb99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9f2dba3783e48968554ca75be01cd5c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 902.487321] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451871, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.543150] env[62383]: DEBUG oslo_concurrency.lockutils [req-a2426420-5417-42c2-bb78-d67ce6fa27d9 req-fde75a90-b75f-48fe-9213-adc61061bf0f service nova] Releasing lock "refresh_cache-0f6b7094-27a0-4e97-98ac-bff857124b6c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.625627] env[62383]: INFO nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Took 29.51 seconds to build instance. 
[ 902.744169] env[62383]: DEBUG nova.network.neutron [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Successfully created port: 0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.751032] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524a8cc7-2a94-e106-b1b7-203488ac780e, 'name': SearchDatastore_Task, 'duration_secs': 0.009369} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.751032] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.751032] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.751299] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.751451] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.751644] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.751925] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ba47373-5615-4599-a1e6-b3147becde56 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.767631] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Created directory with path [datastore2] devstack-image-cache_base 
{{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.767869] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 902.768692] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e23c6a92-1dd4-4c86-8792-4403e90439fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.775232] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 902.775232] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ce6955-0b82-0a8b-3975-ab50a15dd6c3" [ 902.775232] env[62383]: _type = "Task" [ 902.775232] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.783694] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ce6955-0b82-0a8b-3975-ab50a15dd6c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.860260] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451869, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.863656] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451870, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.939941] env[62383]: INFO nova.compute.resource_tracker [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating resource usage from migration f38f2250-33df-4650-ba7b-2012a4623baa [ 902.942261] env[62383]: DEBUG nova.compute.manager [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 902.981887] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451871, 'name': Rename_Task, 'duration_secs': 0.217916} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.984975] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 902.985462] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-103f82c4-69e5-481c-8242-7f33bba3921d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.994161] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 902.994161] env[62383]: value = "task-2451872" [ 902.994161] env[62383]: _type = "Task" [ 902.994161] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.001400] env[62383]: DEBUG nova.network.neutron [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 903.010612] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451872, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.127381] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "4d58d2e6-171d-4346-b281-bcbd22286623" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.023s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.196498] env[62383]: DEBUG nova.network.neutron [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Updating instance_info_cache with network_info: [{"id": "e6fce712-4e28-4f58-ad16-497490c0dded", "address": "fa:16:3e:aa:03:a2", "network": {"id": "a67a3c9d-8a85-49cf-939b-9e7460d4f579", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-906469608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a9dddf0be63492aa59c78d5bcddd9fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6fce712-4e", "ovs_interfaceid": "e6fce712-4e28-4f58-ad16-497490c0dded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.279157] env[62383]: DEBUG nova.compute.manager [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Received event network-vif-plugged-e6fce712-4e28-4f58-ad16-497490c0dded {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 903.279629] env[62383]: DEBUG oslo_concurrency.lockutils [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] Acquiring lock "188d6b20-3dca-4c1c-8271-1871d2c992d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.279902] env[62383]: DEBUG oslo_concurrency.lockutils [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] Lock "188d6b20-3dca-4c1c-8271-1871d2c992d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.280098] env[62383]: DEBUG oslo_concurrency.lockutils [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] Lock "188d6b20-3dca-4c1c-8271-1871d2c992d5-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.280383] env[62383]: DEBUG nova.compute.manager [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] No waiting events found dispatching network-vif-plugged-e6fce712-4e28-4f58-ad16-497490c0dded {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 903.280437] env[62383]: WARNING nova.compute.manager [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Received unexpected event network-vif-plugged-e6fce712-4e28-4f58-ad16-497490c0dded for instance with vm_state building and task_state spawning. [ 903.280643] env[62383]: DEBUG nova.compute.manager [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Received event network-changed-e6fce712-4e28-4f58-ad16-497490c0dded {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 903.280822] env[62383]: DEBUG nova.compute.manager [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Refreshing instance network info cache due to event network-changed-e6fce712-4e28-4f58-ad16-497490c0dded. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 903.280951] env[62383]: DEBUG oslo_concurrency.lockutils [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] Acquiring lock "refresh_cache-188d6b20-3dca-4c1c-8271-1871d2c992d5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.293491] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ce6955-0b82-0a8b-3975-ab50a15dd6c3, 'name': SearchDatastore_Task, 'duration_secs': 0.053647} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.294452] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3dd37c9-6b04-48c7-abbf-bc5198a91c33 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.301623] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 903.301623] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5215d53d-cc7d-ae27-7fe0-6e0b79451b45" [ 903.301623] env[62383]: _type = "Task" [ 903.301623] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.309330] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5215d53d-cc7d-ae27-7fe0-6e0b79451b45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.358342] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451869, 'name': ReconfigVM_Task, 'duration_secs': 0.750882} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.360045] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Reconfigured VM instance instance-0000004b to attach disk [datastore2] b9669bb8-680f-492a-a7c6-82e6edb0a8ed/b9669bb8-680f-492a-a7c6-82e6edb0a8ed.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.363673] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d277c82c-3c0a-4fba-849c-139f98fdbdad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.365426] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451870, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699021} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.365525] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 23d24da6-c7d8-4d6a-8442-a1066505aab1/23d24da6-c7d8-4d6a-8442-a1066505aab1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 903.365691] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 903.366268] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bc9b7410-2dc5-4a09-b30b-9ec516b47931 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.370040] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845b29ff-a907-4529-b068-4082f88d2951 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.376799] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 903.376799] env[62383]: value = "task-2451873" [ 903.376799] env[62383]: _type = "Task" [ 903.376799] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.377116] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 903.377116] env[62383]: value = "task-2451874" [ 903.377116] env[62383]: _type = "Task" [ 903.377116] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.388454] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa0263ad-6055-4d12-93db-a6524b210922 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.397344] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451873, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.425448] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451874, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.426899] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7c892e-f1e4-4066-a818-21f2ae462d19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.434157] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368ac288-23a0-4622-a251-0b1170ab85e0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.448177] env[62383]: DEBUG nova.compute.provider_tree [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.504147] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451872, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.698929] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Releasing lock "refresh_cache-188d6b20-3dca-4c1c-8271-1871d2c992d5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.699329] env[62383]: DEBUG nova.compute.manager [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Instance network_info: |[{"id": "e6fce712-4e28-4f58-ad16-497490c0dded", "address": "fa:16:3e:aa:03:a2", "network": {"id": "a67a3c9d-8a85-49cf-939b-9e7460d4f579", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-906469608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a9dddf0be63492aa59c78d5bcddd9fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6fce712-4e", "ovs_interfaceid": "e6fce712-4e28-4f58-ad16-497490c0dded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 903.699646] env[62383]: DEBUG oslo_concurrency.lockutils [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] Acquired lock 
"refresh_cache-188d6b20-3dca-4c1c-8271-1871d2c992d5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.699829] env[62383]: DEBUG nova.network.neutron [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Refreshing network info cache for port e6fce712-4e28-4f58-ad16-497490c0dded {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 903.701784] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:03:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6fce712-4e28-4f58-ad16-497490c0dded', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 903.710222] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Creating folder: Project (9a9dddf0be63492aa59c78d5bcddd9fd). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 903.711315] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d45c8084-336b-4e32-8adc-c145b5532c0a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.723445] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Created folder: Project (9a9dddf0be63492aa59c78d5bcddd9fd) in parent group-v496304. [ 903.723648] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Creating folder: Instances. Parent ref: group-v496517. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 903.723885] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81b91ecd-d256-4cb0-a367-c42ccace6b27 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.732901] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Created folder: Instances in parent group-v496517. [ 903.733174] env[62383]: DEBUG oslo.service.loopingcall [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 903.733389] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 903.733629] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47d57a4d-27a5-4a97-aff4-acdea7d312e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.761727] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 903.761727] env[62383]: value = "task-2451877" [ 903.761727] env[62383]: _type = "Task" [ 903.761727] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.769224] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451877, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.814633] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5215d53d-cc7d-ae27-7fe0-6e0b79451b45, 'name': SearchDatastore_Task, 'duration_secs': 0.05234} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.814900] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.815186] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 0f6b7094-27a0-4e97-98ac-bff857124b6c/0f6b7094-27a0-4e97-98ac-bff857124b6c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 903.815440] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6eb72e36-6505-489f-b83a-e017d7ff3a2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.824841] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 903.824841] env[62383]: value = "task-2451878" [ 903.824841] env[62383]: _type = "Task" [ 903.824841] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.834134] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.890948] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451873, 'name': Rename_Task, 'duration_secs': 0.348063} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.894020] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.894331] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451874, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11823} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.894544] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c03f19b7-7683-467c-b400-67a7825baeb1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.896182] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.896943] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7fad52-ac48-4e20-88dc-de199e656b16 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.919876] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 23d24da6-c7d8-4d6a-8442-a1066505aab1/23d24da6-c7d8-4d6a-8442-a1066505aab1.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.923630] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00ef5de2-90d4-4869-bad2-e1f4b42f08be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.937811] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for 
the task: (returnval){ [ 903.937811] env[62383]: value = "task-2451879" [ 903.937811] env[62383]: _type = "Task" [ 903.937811] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.943693] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 903.943693] env[62383]: value = "task-2451880" [ 903.943693] env[62383]: _type = "Task" [ 903.943693] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.947127] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451879, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.956343] env[62383]: DEBUG nova.scheduler.client.report [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.960308] env[62383]: DEBUG nova.compute.manager [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 903.962494] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451880, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.992091] env[62383]: DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 903.992091] env[62383]: DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.992289] env[62383]: DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 903.992457] env[62383]: DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.992612] env[62383]: DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 903.992760] env[62383]: DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 903.992990] env[62383]: DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 903.993187] env[62383]: DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 903.993479] env[62383]: 
DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 903.993563] env[62383]: DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 903.993864] env[62383]: DEBUG nova.virt.hardware [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 903.994932] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8bc6ed-765b-4c4c-abb6-8c6ecff7ca22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.011882] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33d1dc5-525e-4d64-98d9-ae192f1c32ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.018256] env[62383]: DEBUG oslo_vmware.api [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451872, 'name': PowerOnVM_Task, 'duration_secs': 0.963105} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.018673] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.018978] env[62383]: INFO nova.compute.manager [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Took 18.28 seconds to spawn the instance on the hypervisor. [ 904.019304] env[62383]: DEBUG nova.compute.manager [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.021867] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d86d839-e317-4518-b8ec-49e84a3c9c82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.272851] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451877, 'name': CreateVM_Task, 'duration_secs': 0.436519} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.273132] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 904.274087] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.274303] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.274737] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 904.276164] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-749edeaa-c33a-43fc-b153-587c0e502b0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.281718] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for the task: (returnval){ [ 904.281718] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5229cb3d-6e8d-0377-85a3-c0134b7ea262" [ 904.281718] env[62383]: _type = "Task" [ 904.281718] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.294396] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5229cb3d-6e8d-0377-85a3-c0134b7ea262, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.340426] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451878, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.455643] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451879, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.461779] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451880, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.465057] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.540s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.465336] env[62383]: INFO nova.compute.manager [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Migrating [ 904.473996] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.178s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.474270] env[62383]: DEBUG nova.objects.instance [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lazy-loading 'resources' on Instance uuid 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.556706] env[62383]: INFO nova.compute.manager [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Took 36.67 seconds to build instance. [ 904.586790] env[62383]: DEBUG nova.network.neutron [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Successfully updated port: 0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 904.614114] env[62383]: DEBUG nova.network.neutron [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Updated VIF entry in instance network info cache for port e6fce712-4e28-4f58-ad16-497490c0dded. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 904.614210] env[62383]: DEBUG nova.network.neutron [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Updating instance_info_cache with network_info: [{"id": "e6fce712-4e28-4f58-ad16-497490c0dded", "address": "fa:16:3e:aa:03:a2", "network": {"id": "a67a3c9d-8a85-49cf-939b-9e7460d4f579", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-906469608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a9dddf0be63492aa59c78d5bcddd9fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6fce712-4e", "ovs_interfaceid": "e6fce712-4e28-4f58-ad16-497490c0dded", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.792628] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5229cb3d-6e8d-0377-85a3-c0134b7ea262, 'name': SearchDatastore_Task, 'duration_secs': 0.056022} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.792925] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.793185] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 904.793430] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.793580] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.793761] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 904.794036] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29de0144-dd27-4ea8-a7cc-a2203ddffd50 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.810164] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 904.810349] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 904.811080] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daaea167-710b-42f7-a7cc-ab007921b740 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.816319] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for the task: (returnval){ [ 904.816319] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]523e7ea2-7856-4ac3-94c2-5de99dc37fbc" [ 904.816319] env[62383]: _type = "Task" [ 904.816319] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.823773] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523e7ea2-7856-4ac3-94c2-5de99dc37fbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.833709] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451878, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589908} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.833959] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 0f6b7094-27a0-4e97-98ac-bff857124b6c/0f6b7094-27a0-4e97-98ac-bff857124b6c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 904.834202] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 904.834442] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70bdb392-7df0-4e78-a3d9-02e660a8e60d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.864823] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 904.864823] env[62383]: value = "task-2451881" [ 904.864823] env[62383]: _type = "Task" [ 904.864823] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.948491] env[62383]: DEBUG oslo_vmware.api [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451879, 'name': PowerOnVM_Task, 'duration_secs': 0.723713} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.951636] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.951890] env[62383]: INFO nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Took 11.49 seconds to spawn the instance on the hypervisor. [ 904.952100] env[62383]: DEBUG nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.952867] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f264bd-48f4-452a-8a87-b064dcd47c19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.965168] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451880, 'name': ReconfigVM_Task, 'duration_secs': 0.550896} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.965644] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 23d24da6-c7d8-4d6a-8442-a1066505aab1/23d24da6-c7d8-4d6a-8442-a1066505aab1.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 904.966229] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cd442c4-c5f7-472e-a45e-4ecb3a4374e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.975800] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 904.975800] env[62383]: value = "task-2451882" [ 904.975800] env[62383]: _type = "Task" [ 904.975800] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.988657] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "506afe7c-f19b-4417-b097-485c0244a019" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.988657] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.988657] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.988657] env[62383]: DEBUG nova.network.neutron [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.990152] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451882, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.059180] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bf03cdf3-e996-4acd-94b6-19b5a2e34482 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "506afe7c-f19b-4417-b097-485c0244a019" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.185s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.059492] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "506afe7c-f19b-4417-b097-485c0244a019" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.071s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.059623] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "506afe7c-f19b-4417-b097-485c0244a019-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.059921] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "506afe7c-f19b-4417-b097-485c0244a019-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.060111] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "506afe7c-f19b-4417-b097-485c0244a019-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.062683] env[62383]: INFO nova.compute.manager [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Terminating instance [ 905.090300] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.090428] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.090629] env[62383]: DEBUG nova.network.neutron [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 
tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.120876] env[62383]: DEBUG oslo_concurrency.lockutils [req-7bbb1630-f9af-42a3-a814-bf5a62448e38 req-e96d6e43-426f-409d-b9b7-189ef29a7b87 service nova] Releasing lock "refresh_cache-188d6b20-3dca-4c1c-8271-1871d2c992d5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.305713] env[62383]: DEBUG nova.compute.manager [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Received event network-vif-plugged-0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 905.305897] env[62383]: DEBUG oslo_concurrency.lockutils [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] Acquiring lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.306097] env[62383]: DEBUG oslo_concurrency.lockutils [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.306279] env[62383]: DEBUG oslo_concurrency.lockutils [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.306450] env[62383]: DEBUG nova.compute.manager [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] No waiting events found dispatching network-vif-plugged-0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 905.306611] env[62383]: WARNING nova.compute.manager [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Received unexpected event network-vif-plugged-0afca8d2-b019-4a25-af28-7061dbf32e28 for instance with vm_state building and task_state spawning. [ 905.306807] env[62383]: DEBUG nova.compute.manager [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Received event network-changed-0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 905.306984] env[62383]: DEBUG nova.compute.manager [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Refreshing instance network info cache due to event network-changed-0afca8d2-b019-4a25-af28-7061dbf32e28. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 905.307165] env[62383]: DEBUG oslo_concurrency.lockutils [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] Acquiring lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.322042] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a953bc-08ae-4ac4-bc2a-3886962c2106 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.330097] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]523e7ea2-7856-4ac3-94c2-5de99dc37fbc, 'name': SearchDatastore_Task, 'duration_secs': 0.035933} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.332345] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85f8a175-ac38-40b9-98bc-4f3730b803c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.335008] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48aaa99-70ad-44ed-a7ad-a4fde35c36a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.342729] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for the task: (returnval){ [ 905.342729] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522d9abb-1db2-e76a-061c-9724a80a6b59" [ 905.342729] env[62383]: _type = "Task" [ 905.342729] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.372792] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f4f457-bf56-48bb-91d9-2cbe15274200 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.382177] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522d9abb-1db2-e76a-061c-9724a80a6b59, 'name': SearchDatastore_Task, 'duration_secs': 0.036342} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.386873] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.387161] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 188d6b20-3dca-4c1c-8271-1871d2c992d5/188d6b20-3dca-4c1c-8271-1871d2c992d5.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 905.387483] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451881, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122288} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.387760] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eec17695-971b-4c28-a9b8-bc282c9ce18e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.389785] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.391090] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3923601-752a-4698-9fc3-6373e41fa2bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.397346] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69245728-e87c-471a-9608-4b9532588d28 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.412815] env[62383]: DEBUG nova.compute.provider_tree [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.432604] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] 0f6b7094-27a0-4e97-98ac-bff857124b6c/0f6b7094-27a0-4e97-98ac-bff857124b6c.vmdk or device None with 
type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.433838] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-310ab822-f475-49a9-a081-a2f4aa0ecf79 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.458183] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for the task: (returnval){ [ 905.458183] env[62383]: value = "task-2451883" [ 905.458183] env[62383]: _type = "Task" [ 905.458183] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.462913] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 905.462913] env[62383]: value = "task-2451884" [ 905.462913] env[62383]: _type = "Task" [ 905.462913] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.474019] env[62383]: INFO nova.compute.manager [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Took 32.33 seconds to build instance. [ 905.478202] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451883, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.485065] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451884, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.489970] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451882, 'name': Rename_Task, 'duration_secs': 0.506968} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.490729] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 905.490729] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1aa27d26-16c4-4889-8327-70fc4aa6b12f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.498426] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 905.498426] env[62383]: value = "task-2451885" [ 905.498426] env[62383]: _type = "Task" [ 905.498426] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.508009] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451885, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.568384] env[62383]: DEBUG nova.compute.manager [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 905.568620] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.569653] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c1c9d0-4fc2-4c06-a9b9-0d7cb8da65db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.580334] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.580607] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0889c6cf-5e93-4ffe-b665-50e05a5a4d6a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.588782] env[62383]: DEBUG oslo_vmware.api [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 905.588782] env[62383]: value = "task-2451886" [ 905.588782] env[62383]: _type = "Task" [ 905.588782] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.600805] env[62383]: DEBUG oslo_vmware.api [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451886, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.640971] env[62383]: DEBUG nova.network.neutron [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.812921] env[62383]: DEBUG nova.network.neutron [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance_info_cache with network_info: [{"id": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "address": "fa:16:3e:ee:69:75", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap913ca293-96", "ovs_interfaceid": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.839257] env[62383]: DEBUG nova.network.neutron [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updating instance_info_cache with network_info: [{"id": "0afca8d2-b019-4a25-af28-7061dbf32e28", "address": "fa:16:3e:e8:5a:70", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": 
"nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0afca8d2-b0", "ovs_interfaceid": "0afca8d2-b019-4a25-af28-7061dbf32e28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.935082] env[62383]: DEBUG nova.scheduler.client.report [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 905.973251] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451883, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.976785] env[62383]: DEBUG oslo_concurrency.lockutils [None req-798de6b8-a34b-4a32-aa00-2c433a997595 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.843s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.977386] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451884, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.008586] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451885, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.107610] env[62383]: DEBUG oslo_vmware.api [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451886, 'name': PowerOffVM_Task, 'duration_secs': 0.224185} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.108069] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 906.108371] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 906.108710] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea289e8a-a287-4f8a-be7a-bee822b69886 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.207452] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 906.207685] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 906.207868] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleting the datastore file [datastore2] 506afe7c-f19b-4417-b097-485c0244a019 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.208151] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d82ae79b-b1e6-4bb8-9e1c-1bb69dbf9dac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.216031] env[62383]: DEBUG oslo_vmware.api [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 906.216031] env[62383]: value = "task-2451888" [ 906.216031] env[62383]: _type = "Task" [ 906.216031] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.223445] env[62383]: DEBUG oslo_vmware.api [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451888, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.316831] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.342036] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.342183] env[62383]: DEBUG nova.compute.manager [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Instance network_info: |[{"id": "0afca8d2-b019-4a25-af28-7061dbf32e28", "address": "fa:16:3e:e8:5a:70", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0afca8d2-b0", "ovs_interfaceid": "0afca8d2-b019-4a25-af28-7061dbf32e28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 906.342467] env[62383]: DEBUG oslo_concurrency.lockutils [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] Acquired lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.342649] env[62383]: DEBUG nova.network.neutron [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Refreshing network info cache for port 0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 906.344105] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:5a:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b36c5ae6-c344-4bd1-8239-29128e2bbfbf', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0afca8d2-b019-4a25-af28-7061dbf32e28', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 906.351919] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Creating folder: Project (b9f2dba3783e48968554ca75be01cd5c). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 906.353031] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bd203e0-7313-4a63-90a1-0cffce1ff179 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.369424] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Created folder: Project (b9f2dba3783e48968554ca75be01cd5c) in parent group-v496304. [ 906.369643] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Creating folder: Instances. Parent ref: group-v496520. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 906.369882] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d57d20d8-ea5d-464f-be80-4401443e5379 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.384076] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Created folder: Instances in parent group-v496520. [ 906.384076] env[62383]: DEBUG oslo.service.loopingcall [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.384248] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 906.384608] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33c6fbf4-d4bd-459d-bca2-494d109169fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.410097] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 906.410097] env[62383]: value = "task-2451891" [ 906.410097] env[62383]: _type = "Task" [ 906.410097] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.440338] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.442826] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.056s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.443074] env[62383]: DEBUG nova.objects.instance [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lazy-loading 'resources' on Instance uuid e41f5c22-44e0-4de8-a4d0-865fe2c6febd {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.469812] env[62383]: INFO nova.scheduler.client.report [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Deleted allocations for instance 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a [ 906.474836] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451883, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.728725} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.477495] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 188d6b20-3dca-4c1c-8271-1871d2c992d5/188d6b20-3dca-4c1c-8271-1871d2c992d5.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 906.477856] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 906.481153] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f90420d-897f-470c-9ce8-980f73058521 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.483245] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451884, 'name': ReconfigVM_Task, 'duration_secs': 0.882755} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.483505] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Reconfigured VM instance instance-0000004d to attach disk [datastore2] 0f6b7094-27a0-4e97-98ac-bff857124b6c/0f6b7094-27a0-4e97-98ac-bff857124b6c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.484772] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db0b4714-701d-4f87-b895-57d4b6051e08 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.491241] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for the task: (returnval){ [ 906.491241] env[62383]: value = "task-2451892" [ 906.491241] env[62383]: _type = "Task" [ 906.491241] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.492636] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 906.492636] env[62383]: value = "task-2451893" [ 906.492636] env[62383]: _type = "Task" [ 906.492636] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.514114] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451893, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.514983] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451892, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.520315] env[62383]: DEBUG oslo_vmware.api [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451885, 'name': PowerOnVM_Task, 'duration_secs': 1.00719} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.520665] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 906.520953] env[62383]: INFO nova.compute.manager [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Took 10.43 seconds to spawn the instance on the hypervisor. 
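The records above show the driver repeatedly entering wait_for_task and _poll_task against vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), logging "progress is N%" on each poll until the task completes. The sketch below is only an illustration of that poll-until-complete pattern, not oslo.vmware's implementation; get_task_info is a hypothetical callable standing in for a vSphere task query.

import time

# Illustrative sketch only: a generic poll-until-complete loop in the spirit of the
# wait_for_task/_poll_task cycles logged above. get_task_info is a hypothetical
# callable standing in for a vSphere task query; this is not oslo.vmware's code.
def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300.0):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)  # e.g. {'state': 'running', 'progress': 25}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError("Task %s failed: %s" % (task_id, info.get('error')))
        # Each "progress is N%" line in the log corresponds to one pass through here.
        time.sleep(interval)
    raise TimeoutError("Task %s did not complete within %ss" % (task_id, timeout))
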
[ 906.521245] env[62383]: DEBUG nova.compute.manager [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 906.522182] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ee33f1-8684-46a4-91f4-2cff353006ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.609234] env[62383]: DEBUG oslo_concurrency.lockutils [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "4d58d2e6-171d-4346-b281-bcbd22286623" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.609537] env[62383]: DEBUG oslo_concurrency.lockutils [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "4d58d2e6-171d-4346-b281-bcbd22286623" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.609791] env[62383]: DEBUG oslo_concurrency.lockutils [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "4d58d2e6-171d-4346-b281-bcbd22286623-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.609984] env[62383]: DEBUG oslo_concurrency.lockutils [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "4d58d2e6-171d-4346-b281-bcbd22286623-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.610209] env[62383]: DEBUG oslo_concurrency.lockutils [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "4d58d2e6-171d-4346-b281-bcbd22286623-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.613349] env[62383]: INFO nova.compute.manager [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Terminating instance [ 906.673274] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.673565] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.673787] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.674011] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.674265] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.676506] env[62383]: INFO nova.compute.manager [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Terminating instance [ 906.724496] env[62383]: DEBUG oslo_vmware.api [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151313} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.724769] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.724922] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.725119] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.725296] env[62383]: INFO nova.compute.manager [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Took 1.16 seconds to destroy the instance on the hypervisor. [ 906.725535] env[62383]: DEBUG oslo.service.loopingcall [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.725721] env[62383]: DEBUG nova.compute.manager [-] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 906.725815] env[62383]: DEBUG nova.network.neutron [-] [instance: 506afe7c-f19b-4417-b097-485c0244a019] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.919553] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451891, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.989694] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2ca81f0-75f4-40cf-b5ac-ef3c7c25a0c0 tempest-ServersTestFqdnHostnames-335775192 tempest-ServersTestFqdnHostnames-335775192-project-member] Lock "2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.765s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.011320] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451892, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088741} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.011320] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 907.014302] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782baba8-3e1c-4102-a318-7c64562179ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.017245] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451893, 'name': Rename_Task, 'duration_secs': 0.232527} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.019774] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 907.020725] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5be1c4cd-2134-486b-a6bb-0913e73066b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.040222] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 188d6b20-3dca-4c1c-8271-1871d2c992d5/188d6b20-3dca-4c1c-8271-1871d2c992d5.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 907.052156] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b690b9de-8678-43b4-84ba-ddd595eac169 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.070145] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 907.070145] env[62383]: value = "task-2451894" [ 907.070145] env[62383]: _type = "Task" [ 907.070145] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.070729] env[62383]: INFO nova.compute.manager [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Took 33.51 seconds to build instance. 
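The "Acquiring lock ... by ...", "acquired ... waited Ns", and "released ... held Ns" lines in these records come from oslo.concurrency's lockutils, which Nova uses to serialize work keyed on an instance UUID (or names like "compute_resources"). A minimal usage sketch follows, assuming oslo.concurrency is installed; the lock name and function body are invented for illustration and are not Nova's actual code.

from oslo_concurrency import lockutils

# Minimal usage sketch: any two callers wrapping work in the same lock name are
# serialized, which is why the log reports how long each caller waited for the
# lock and how long it was held. The name below is a placeholder, not a real UUID.
@lockutils.synchronized("example-instance-uuid")
def do_terminate_instance():
    # Invented placeholder body; real code would tear the instance down here.
    pass

do_terminate_instance()
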
[ 907.077201] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for the task: (returnval){ [ 907.077201] env[62383]: value = "task-2451895" [ 907.077201] env[62383]: _type = "Task" [ 907.077201] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.084095] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451894, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.092728] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451895, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.118607] env[62383]: DEBUG nova.compute.manager [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.118897] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.120097] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deed47b6-b693-4cd4-b3fc-323f12e2870a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.131395] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.131570] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d87a8d8-74aa-4885-a8b9-b5acc4f55da5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.144719] env[62383]: DEBUG oslo_vmware.api [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 907.144719] env[62383]: value = "task-2451896" [ 907.144719] env[62383]: _type = "Task" [ 907.144719] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.153627] env[62383]: DEBUG oslo_vmware.api [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451896, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.180832] env[62383]: DEBUG nova.compute.manager [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 907.181125] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.182448] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456feb4e-57b8-4f4a-817b-9dfc45943979 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.193254] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.196481] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2095deb1-e0cf-4167-91cd-7833e8036a8e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.203634] env[62383]: DEBUG oslo_vmware.api [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 907.203634] env[62383]: value = "task-2451897" [ 907.203634] env[62383]: _type = "Task" [ 907.203634] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.212603] env[62383]: DEBUG oslo_vmware.api [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451897, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.231566] env[62383]: DEBUG nova.network.neutron [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updated VIF entry in instance network info cache for port 0afca8d2-b019-4a25-af28-7061dbf32e28. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 907.232034] env[62383]: DEBUG nova.network.neutron [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updating instance_info_cache with network_info: [{"id": "0afca8d2-b019-4a25-af28-7061dbf32e28", "address": "fa:16:3e:e8:5a:70", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0afca8d2-b0", "ovs_interfaceid": "0afca8d2-b019-4a25-af28-7061dbf32e28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.344746] env[62383]: DEBUG nova.compute.manager [req-3982dfa6-4570-4ea1-8089-25c1cdee1267 req-c6c5268a-9023-44c8-a7df-ff14c579df6b service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Received event network-vif-deleted-b32e6644-0a0d-489f-810c-598958631523 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 907.344746] env[62383]: INFO nova.compute.manager [req-3982dfa6-4570-4ea1-8089-25c1cdee1267 req-c6c5268a-9023-44c8-a7df-ff14c579df6b service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Neutron deleted interface b32e6644-0a0d-489f-810c-598958631523; detaching it from the instance and deleting it from the info cache [ 907.344746] env[62383]: DEBUG nova.network.neutron [req-3982dfa6-4570-4ea1-8089-25c1cdee1267 req-c6c5268a-9023-44c8-a7df-ff14c579df6b service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.413540] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b45a7c73-dbe5-4a47-a1a1-2cdfc3ae9f76 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.433249] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de56803c-fd8c-4c57-8cc6-bb4730142c4b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.436719] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451891, 'name': CreateVM_Task, 'duration_secs': 0.599019} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.437271] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 907.438477] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.438654] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.439053] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 907.439370] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a002d16e-2c6e-4336-8eb4-85b58bc02407 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.472276] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da0c198-1599-40fa-830a-36a72df0b501 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.477506] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 907.477506] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5289c1e0-7fc0-14cf-f00a-2311d651c7aa" [ 907.477506] env[62383]: _type = "Task" [ 907.477506] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.487033] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d422db39-dfe9-44b8-9255-c615d535f7a0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.494837] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5289c1e0-7fc0-14cf-f00a-2311d651c7aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.505503] env[62383]: DEBUG nova.compute.provider_tree [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.572993] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e0bf880-ed96-421f-8b40-c862e9099cda tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.026s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.586197] env[62383]: DEBUG oslo_vmware.api [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451894, 'name': PowerOnVM_Task, 'duration_secs': 0.530766} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.586689] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.586980] env[62383]: INFO nova.compute.manager [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Took 8.81 seconds to spawn the instance on the hypervisor. [ 907.587280] env[62383]: DEBUG nova.compute.manager [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 907.588572] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b98bf0f-fbd6-4682-a1ff-11b378be7b6f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.594294] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451895, 'name': ReconfigVM_Task, 'duration_secs': 0.371458} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.594995] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 188d6b20-3dca-4c1c-8271-1871d2c992d5/188d6b20-3dca-4c1c-8271-1871d2c992d5.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 907.595700] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8af43751-27b2-4b79-9c68-ea8cfa80ea5a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.607189] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for the task: (returnval){ [ 907.607189] env[62383]: value = "task-2451898" [ 907.607189] env[62383]: _type = "Task" [ 907.607189] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.615110] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451898, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.655105] env[62383]: DEBUG oslo_vmware.api [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451896, 'name': PowerOffVM_Task, 'duration_secs': 0.226442} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.655480] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.655684] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.655962] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3891d14-10b7-45e9-b41b-78107bced032 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.680025] env[62383]: DEBUG nova.network.neutron [-] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.716162] env[62383]: DEBUG oslo_vmware.api [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451897, 'name': PowerOffVM_Task, 'duration_secs': 0.210605} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.716162] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.716162] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.716162] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb167e6e-8e30-4847-8dc5-551d8c4b0433 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.731028] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.731028] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.731028] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleting the datastore file [datastore2] 4d58d2e6-171d-4346-b281-bcbd22286623 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.731028] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05cdec4c-c905-4cd8-b2cd-96e6b384eb6f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.735253] env[62383]: DEBUG oslo_concurrency.lockutils [req-a041745e-dcaa-4fff-9e3f-1beb48cc27d2 req-4489d11b-6b6f-4235-b797-de3a53572a55 service nova] Releasing lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.735927] env[62383]: DEBUG oslo_vmware.api [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 907.735927] env[62383]: value = "task-2451901" [ 907.735927] env[62383]: _type = "Task" [ 907.735927] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.745524] env[62383]: DEBUG oslo_vmware.api [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451901, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.785139] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.786212] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.786212] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleting the datastore file [datastore2] b9669bb8-680f-492a-a7c6-82e6edb0a8ed {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.786212] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9c4ba54-51da-4ff0-87e2-a71f5845451a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.792582] env[62383]: DEBUG oslo_vmware.api [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 907.792582] env[62383]: value = "task-2451902" [ 907.792582] env[62383]: _type = "Task" [ 907.792582] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.808523] env[62383]: DEBUG oslo_vmware.api [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.833429] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59407a45-2af7-4875-a447-0858848936a2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.855669] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance '81921762-ac51-42d2-83dc-d5b6e904fbb7' progress to 0 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 907.859487] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d007a93f-4bb5-4fcd-ac25-49da0702d031 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.868672] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3948b1-e223-416a-951a-1c4b6d456578 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.905287] env[62383]: DEBUG nova.compute.manager [req-3982dfa6-4570-4ea1-8089-25c1cdee1267 req-c6c5268a-9023-44c8-a7df-ff14c579df6b service nova] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Detach interface failed, port_id=b32e6644-0a0d-489f-810c-598958631523, reason: Instance 506afe7c-f19b-4417-b097-485c0244a019 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 907.990355] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5289c1e0-7fc0-14cf-f00a-2311d651c7aa, 'name': SearchDatastore_Task, 'duration_secs': 0.017398} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.990730] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.991031] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.991382] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.991598] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.991819] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.992126] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-509c1e3a-2c5f-4a67-b443-027399353bb5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.001032] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 908.001271] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 908.002288] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9092e5ba-6f4f-4513-b378-32c130eadfb3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.008575] env[62383]: DEBUG nova.scheduler.client.report [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 908.012205] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 908.012205] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5203c78a-6d1d-6ab0-55a5-217e4b25c533" [ 908.012205] env[62383]: _type = "Task" [ 908.012205] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.024985] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5203c78a-6d1d-6ab0-55a5-217e4b25c533, 'name': SearchDatastore_Task, 'duration_secs': 0.00857} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.027945] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce05577b-0051-4ad9-ae72-4380e04519d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.036296] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 908.036296] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b9d56c-a371-968c-474d-56626c367f64" [ 908.036296] env[62383]: _type = "Task" [ 908.036296] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.045884] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b9d56c-a371-968c-474d-56626c367f64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.113851] env[62383]: INFO nova.compute.manager [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Took 30.72 seconds to build instance. [ 908.119655] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451898, 'name': Rename_Task, 'duration_secs': 0.237499} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.119942] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 908.120379] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51e6be99-b1e6-4963-80f9-cbe143cf6af8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.126266] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for the task: (returnval){ [ 908.126266] env[62383]: value = "task-2451903" [ 908.126266] env[62383]: _type = "Task" [ 908.126266] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.134363] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451903, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.181537] env[62383]: INFO nova.compute.manager [-] [instance: 506afe7c-f19b-4417-b097-485c0244a019] Took 1.45 seconds to deallocate network for instance. [ 908.252025] env[62383]: DEBUG oslo_vmware.api [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451901, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156755} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.252025] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.252025] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.252025] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.252025] env[62383]: INFO nova.compute.manager [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Took 1.13 seconds to destroy the instance on the hypervisor. [ 908.252330] env[62383]: DEBUG oslo.service.loopingcall [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.252330] env[62383]: DEBUG nova.compute.manager [-] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.252330] env[62383]: DEBUG nova.network.neutron [-] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.302965] env[62383]: DEBUG oslo_vmware.api [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2451902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213587} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.303243] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.303428] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.303604] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.303776] env[62383]: INFO nova.compute.manager [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Took 1.12 seconds to destroy the instance on the hypervisor. [ 908.304027] env[62383]: DEBUG oslo.service.loopingcall [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.304223] env[62383]: DEBUG nova.compute.manager [-] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.304317] env[62383]: DEBUG nova.network.neutron [-] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.365279] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 908.365544] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3957561-5bee-4053-a021-a7bf25ec8e63 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.373144] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 908.373144] env[62383]: value = "task-2451904" [ 908.373144] env[62383]: _type = "Task" [ 908.373144] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.384777] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451904, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.517025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.074s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.520996] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.125s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.522222] env[62383]: INFO nova.compute.claims [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.547747] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b9d56c-a371-968c-474d-56626c367f64, 'name': SearchDatastore_Task, 'duration_secs': 0.008653} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.548106] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.548272] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] c56464dd-63af-4686-b666-d0ac2df01ec1/c56464dd-63af-4686-b666-d0ac2df01ec1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 908.548959] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-147420ec-a8c8-4ae2-8d64-722bb1328d69 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.555806] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 908.555806] env[62383]: value = "task-2451905" [ 908.555806] env[62383]: _type = "Task" [ 908.555806] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.565451] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451905, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.580123] env[62383]: INFO nova.scheduler.client.report [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Deleted allocations for instance e41f5c22-44e0-4de8-a4d0-865fe2c6febd [ 908.617317] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a31713b-4edb-45d1-b7b0-55a375d37956 tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "0f6b7094-27a0-4e97-98ac-bff857124b6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.236s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.640702] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451903, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.688991] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.765560] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.765758] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.765958] env[62383]: DEBUG nova.compute.manager [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 908.767020] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57729f6f-116e-426a-8138-927181ff394b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.775401] env[62383]: DEBUG nova.compute.manager [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 908.776018] env[62383]: DEBUG nova.objects.instance [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lazy-loading 'flavor' on Instance uuid 23d24da6-c7d8-4d6a-8442-a1066505aab1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.885308] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451904, 'name': PowerOffVM_Task, 'duration_secs': 0.193545} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.885820] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.886175] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance '81921762-ac51-42d2-83dc-d5b6e904fbb7' progress to 17 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 909.066277] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451905, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502984} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.066532] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] c56464dd-63af-4686-b666-d0ac2df01ec1/c56464dd-63af-4686-b666-d0ac2df01ec1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 909.066738] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 909.066985] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c90de9f-f060-4c42-83e5-fc0e7bb28f8f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.072602] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 909.072602] env[62383]: value = "task-2451906" [ 909.072602] env[62383]: _type = "Task" [ 909.072602] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.080750] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451906, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.093647] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02624a78-8efd-423b-811a-0e3910361767 tempest-ServersWithSpecificFlavorTestJSON-1929359405 tempest-ServersWithSpecificFlavorTestJSON-1929359405-project-member] Lock "e41f5c22-44e0-4de8-a4d0-865fe2c6febd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.442s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.105411] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "0f6b7094-27a0-4e97-98ac-bff857124b6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.106070] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "0f6b7094-27a0-4e97-98ac-bff857124b6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.106070] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "0f6b7094-27a0-4e97-98ac-bff857124b6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.106237] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "0f6b7094-27a0-4e97-98ac-bff857124b6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.106449] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "0f6b7094-27a0-4e97-98ac-bff857124b6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.108689] env[62383]: INFO nova.compute.manager [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Terminating instance [ 909.136240] env[62383]: DEBUG oslo_vmware.api [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451903, 'name': PowerOnVM_Task, 'duration_secs': 0.718812} 
completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.136493] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 909.136690] env[62383]: INFO nova.compute.manager [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Took 7.73 seconds to spawn the instance on the hypervisor. [ 909.136870] env[62383]: DEBUG nova.compute.manager [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 909.137651] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a308d7b8-7d1e-4669-847d-e217d4e18adf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.362189] env[62383]: DEBUG nova.network.neutron [-] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.393263] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 909.395502] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.395502] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 909.395502] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
909.395502] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 909.395502] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 909.395821] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 909.395821] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 909.395821] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 909.395821] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 909.395821] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 909.402202] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e6ed147-7662-4399-98f6-bfefc3b7e19f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.414354] env[62383]: DEBUG nova.compute.manager [req-97392111-a547-4dbc-bf7d-47a41a0c3815 req-238db67f-f169-4aa9-80c8-0f5ead0733df service nova] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Received event network-vif-deleted-3ac42380-e58c-4b6c-81b1-92660033b445 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 909.414570] env[62383]: DEBUG nova.compute.manager [req-97392111-a547-4dbc-bf7d-47a41a0c3815 req-238db67f-f169-4aa9-80c8-0f5ead0733df service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Received event network-vif-deleted-33512c3d-a673-4233-8a74-a0972714fa89 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 909.414724] env[62383]: INFO nova.compute.manager [req-97392111-a547-4dbc-bf7d-47a41a0c3815 
req-238db67f-f169-4aa9-80c8-0f5ead0733df service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Neutron deleted interface 33512c3d-a673-4233-8a74-a0972714fa89; detaching it from the instance and deleting it from the info cache [ 909.414892] env[62383]: DEBUG nova.network.neutron [req-97392111-a547-4dbc-bf7d-47a41a0c3815 req-238db67f-f169-4aa9-80c8-0f5ead0733df service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.420891] env[62383]: DEBUG nova.network.neutron [-] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.429026] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 909.429026] env[62383]: value = "task-2451907" [ 909.429026] env[62383]: _type = "Task" [ 909.429026] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.440463] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451907, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.587069] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451906, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068408} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.589445] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 909.589445] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83a07dd-4c5a-4b45-9a3b-8b1ecb4873a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.614682] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] c56464dd-63af-4686-b666-d0ac2df01ec1/c56464dd-63af-4686-b666-d0ac2df01ec1.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.618959] env[62383]: DEBUG nova.compute.manager [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 909.619271] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.619590] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-053a3aa1-d719-4f96-8872-fce6d7295dec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.639062] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30fb3ef4-9083-49fa-a8b7-28aa03792dea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.653536] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.661334] env[62383]: INFO nova.compute.manager [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Took 30.66 seconds to build instance. [ 909.662805] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2ced16f-2421-4e30-bef5-f36a96f82584 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.664432] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 909.664432] env[62383]: value = "task-2451908" [ 909.664432] env[62383]: _type = "Task" [ 909.664432] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.669943] env[62383]: DEBUG oslo_vmware.api [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 909.669943] env[62383]: value = "task-2451909" [ 909.669943] env[62383]: _type = "Task" [ 909.669943] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.677656] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451908, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.686076] env[62383]: DEBUG oslo_vmware.api [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451909, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.788213] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.788629] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f389fa2a-1fe3-42be-bd53-7e6ab7c59795 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.797168] env[62383]: DEBUG oslo_vmware.api [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 909.797168] env[62383]: value = "task-2451910" [ 909.797168] env[62383]: _type = "Task" [ 909.797168] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.808366] env[62383]: DEBUG oslo_vmware.api [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451910, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.865164] env[62383]: INFO nova.compute.manager [-] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Took 1.61 seconds to deallocate network for instance. [ 909.925245] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77813673-2432-414b-ba6b-03967a04c7f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.926705] env[62383]: INFO nova.compute.manager [-] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Took 1.62 seconds to deallocate network for instance. [ 909.942386] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b10c1c-6a99-49e4-862a-ee2c2f4333c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.957633] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451907, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.981520] env[62383]: DEBUG nova.compute.manager [req-97392111-a547-4dbc-bf7d-47a41a0c3815 req-238db67f-f169-4aa9-80c8-0f5ead0733df service nova] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Detach interface failed, port_id=33512c3d-a673-4233-8a74-a0972714fa89, reason: Instance b9669bb8-680f-492a-a7c6-82e6edb0a8ed could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 910.035407] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d501585-a594-43d2-84ba-68474a8c3dbc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.044123] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273155eb-d0cb-417a-a339-8f59ce07670c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.077096] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquiring lock "188d6b20-3dca-4c1c-8271-1871d2c992d5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.078098] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4021beae-1cb7-4590-b1ad-456065d9873f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.085284] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b42bcc-e2f5-45b6-a6a2-dc9222edeb2a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.098992] env[62383]: DEBUG nova.compute.provider_tree [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.164278] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c4f8a60-30ab-40a3-828e-fdb8940cea13 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lock "188d6b20-3dca-4c1c-8271-1871d2c992d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 32.175s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.164278] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lock "188d6b20-3dca-4c1c-8271-1871d2c992d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.087s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.164474] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquiring lock "188d6b20-3dca-4c1c-8271-1871d2c992d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.164598] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lock "188d6b20-3dca-4c1c-8271-1871d2c992d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.164771] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lock "188d6b20-3dca-4c1c-8271-1871d2c992d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.170577] env[62383]: INFO nova.compute.manager [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Terminating instance [ 910.182897] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451908, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.188317] env[62383]: DEBUG oslo_vmware.api [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451909, 'name': PowerOffVM_Task, 'duration_secs': 0.367933} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.188579] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.188654] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 910.188937] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57b0ae37-d067-45b5-961d-04258ce1c23d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.257991] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.258277] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.258472] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Deleting the datastore file [datastore2] 0f6b7094-27a0-4e97-98ac-bff857124b6c {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.258725] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acc662f3-bf5b-40fd-8b6f-120c010561b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.265674] env[62383]: DEBUG oslo_vmware.api [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for the task: (returnval){ [ 910.265674] env[62383]: value = "task-2451912" [ 910.265674] env[62383]: _type = "Task" [ 910.265674] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.273207] env[62383]: DEBUG oslo_vmware.api [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451912, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.306557] env[62383]: DEBUG oslo_vmware.api [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451910, 'name': PowerOffVM_Task, 'duration_secs': 0.248364} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.306912] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.307262] env[62383]: DEBUG nova.compute.manager [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 910.308027] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8ed08f-4d2f-49cc-b2bc-739c784bec16 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.374037] env[62383]: DEBUG oslo_concurrency.lockutils [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.438368] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.441856] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451907, 'name': ReconfigVM_Task, 'duration_secs': 0.884127} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.442194] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance '81921762-ac51-42d2-83dc-d5b6e904fbb7' progress to 33 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 910.605768] env[62383]: DEBUG nova.scheduler.client.report [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.676040] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451908, 'name': ReconfigVM_Task, 'duration_secs': 0.59831} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.676330] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Reconfigured VM instance instance-0000004f to attach disk [datastore2] c56464dd-63af-4686-b666-d0ac2df01ec1/c56464dd-63af-4686-b666-d0ac2df01ec1.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.676978] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d42b005-f99b-4025-a299-1d1fdf2a2118 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.684132] env[62383]: DEBUG nova.compute.manager [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 910.684132] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.684268] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 910.684268] env[62383]: value = "task-2451913" [ 910.684268] env[62383]: _type = "Task" [ 910.684268] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.684973] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b40652e-84a9-4b7f-889f-66ab1ad9e34c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.696204] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451913, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.698232] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.698460] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab294ba4-1af9-4bf8-82bf-b430ae705ff3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.712851] env[62383]: DEBUG oslo_vmware.api [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for the task: (returnval){ [ 910.712851] env[62383]: value = "task-2451914" [ 910.712851] env[62383]: _type = "Task" [ 910.712851] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.721143] env[62383]: DEBUG oslo_vmware.api [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451914, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.781862] env[62383]: DEBUG oslo_vmware.api [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Task: {'id': task-2451912, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173399} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.782198] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 910.782452] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 910.782559] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 910.782644] env[62383]: INFO nova.compute.manager [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 910.782889] env[62383]: DEBUG oslo.service.loopingcall [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 910.783089] env[62383]: DEBUG nova.compute.manager [-] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 910.783191] env[62383]: DEBUG nova.network.neutron [-] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 910.820298] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d0696422-81c8-4849-ade3-226090475bc1 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" "released" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: held 2.054s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.948801] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 910.949401] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 910.949741] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 910.950051] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 910.951212] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 910.951212] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 910.951212] env[62383]: DEBUG 
nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 910.951212] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 910.951212] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 910.951520] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 910.951520] env[62383]: DEBUG nova.virt.hardware [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 910.958110] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Reconfiguring VM instance instance-00000046 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 910.958321] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea2d613b-afad-405d-ad00-151f585945af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.978993] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 910.978993] env[62383]: value = "task-2451915" [ 910.978993] env[62383]: _type = "Task" [ 910.978993] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.988628] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451915, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.109187] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.589s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.109706] env[62383]: DEBUG nova.compute.manager [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 911.114428] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.758s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.114649] env[62383]: DEBUG nova.objects.instance [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lazy-loading 'resources' on Instance uuid 40719661-5955-48ec-b289-b37896dd04df {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.160571] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.160571] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.202800] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451913, 'name': Rename_Task, 'duration_secs': 0.21956} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.204688] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 911.205155] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aab705a2-4975-47b3-880e-7723a1c0d2dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.214329] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 911.214329] env[62383]: value = "task-2451916" [ 911.214329] env[62383]: _type = "Task" [ 911.214329] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.227489] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451916, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.230620] env[62383]: DEBUG oslo_vmware.api [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451914, 'name': PowerOffVM_Task, 'duration_secs': 0.224255} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.231294] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.231609] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.232438] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7df0307a-4d6f-4a8a-9330-0c356a545bbe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.402922] env[62383]: INFO nova.compute.manager [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Rebuilding instance [ 911.462689] env[62383]: DEBUG nova.compute.manager [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 911.465378] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86db588-1a8f-435c-ad41-4f0fbde04246 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.489909] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451915, 'name': ReconfigVM_Task, 'duration_secs': 0.2496} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.491982] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Reconfigured VM instance instance-00000046 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 911.492550] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.492889] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.493226] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Deleting the datastore file [datastore2] 188d6b20-3dca-4c1c-8271-1871d2c992d5 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.495252] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46979dcd-16b4-4d53-bad1-8a57c553d4fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.498408] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22d2b552-ffdd-4b26-ad67-f18f4967b410 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.527088] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 81921762-ac51-42d2-83dc-d5b6e904fbb7/81921762-ac51-42d2-83dc-d5b6e904fbb7.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 911.528910] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7efb3f15-f34a-437f-a998-055e01cfa0ba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.544038] env[62383]: DEBUG oslo_vmware.api [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for the task: (returnval){ [ 911.544038] env[62383]: value = "task-2451918" [ 911.544038] env[62383]: _type = "Task" [ 911.544038] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.547266] env[62383]: DEBUG nova.compute.manager [req-469fcb9f-0f33-4808-a8b0-8b7f61e978ff req-13d38b21-94bd-451d-a1da-1c39ad02834e service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Received event network-vif-deleted-387d5009-0a75-4cba-9ab0-bbdbd5398119 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 911.547545] env[62383]: INFO nova.compute.manager [req-469fcb9f-0f33-4808-a8b0-8b7f61e978ff req-13d38b21-94bd-451d-a1da-1c39ad02834e service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Neutron deleted interface 387d5009-0a75-4cba-9ab0-bbdbd5398119; detaching it from the instance and deleting it from the info cache [ 911.547778] env[62383]: DEBUG nova.network.neutron [req-469fcb9f-0f33-4808-a8b0-8b7f61e978ff req-13d38b21-94bd-451d-a1da-1c39ad02834e service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.557425] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 911.557425] env[62383]: value = "task-2451919" [ 911.557425] env[62383]: _type = "Task" [ 911.557425] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.566799] env[62383]: DEBUG oslo_vmware.api [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451918, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.572623] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.619056] env[62383]: DEBUG nova.compute.utils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 911.621493] env[62383]: DEBUG nova.compute.manager [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 911.621715] env[62383]: DEBUG nova.network.neutron [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 911.672258] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.672258] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 911.683160] env[62383]: DEBUG nova.policy [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0452e2520d954675ab3900351cd3296c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1401660f8f64c72be5f9ea6a0960ce3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 911.705503] env[62383]: DEBUG nova.network.neutron [-] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.724493] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451916, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.004067] env[62383]: DEBUG nova.network.neutron [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Successfully created port: 88efd462-2836-4b8e-9deb-20be5e6eff71 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.055414] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f74eb7-c440-4b1b-a442-195d563ac045 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.059122] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a41d76fe-cc21-4685-91b5-97e9927984fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.071097] env[62383]: DEBUG oslo_vmware.api [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Task: {'id': task-2451918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192716} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.073565] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.073728] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 912.077035] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.077035] env[62383]: INFO nova.compute.manager [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Took 1.39 seconds to destroy the instance on the hypervisor. [ 912.077035] env[62383]: DEBUG oslo.service.loopingcall [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.077035] env[62383]: DEBUG nova.compute.manager [-] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 912.077035] env[62383]: DEBUG nova.network.neutron [-] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.077263] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5187a27a-7d7e-48eb-a773-549c48df3702 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.085579] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451919, 'name': ReconfigVM_Task, 'duration_secs': 0.284159} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.086777] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 81921762-ac51-42d2-83dc-d5b6e904fbb7/81921762-ac51-42d2-83dc-d5b6e904fbb7.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 912.087132] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance '81921762-ac51-42d2-83dc-d5b6e904fbb7' progress to 50 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 912.095983] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0951a9d6-cc7a-41ea-85eb-e87802c46d8d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.129015] env[62383]: DEBUG nova.compute.manager [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.134220] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e4515e-ed3f-49ec-a539-f2f1d4c3debd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.142200] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1bc620-990b-41a2-b103-f362fda0ce71 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.161573] env[62383]: DEBUG nova.compute.manager [req-469fcb9f-0f33-4808-a8b0-8b7f61e978ff req-13d38b21-94bd-451d-a1da-1c39ad02834e service nova] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Detach interface failed, port_id=387d5009-0a75-4cba-9ab0-bbdbd5398119, reason: Instance 0f6b7094-27a0-4e97-98ac-bff857124b6c could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 912.174567] env[62383]: DEBUG nova.compute.provider_tree [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.208849] env[62383]: INFO nova.compute.manager [-] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Took 1.43 seconds to deallocate network for instance. [ 912.224174] env[62383]: DEBUG oslo_vmware.api [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451916, 'name': PowerOnVM_Task, 'duration_secs': 0.669379} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.224174] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 912.224174] env[62383]: INFO nova.compute.manager [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Took 8.26 seconds to spawn the instance on the hypervisor. 
[ 912.224174] env[62383]: DEBUG nova.compute.manager [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 912.224174] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5c3540-8e26-4259-8a68-b0628b4dd607 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.486310] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.486665] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0505923a-2df1-4bcd-a7c6-a7c370606298 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.494967] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 912.494967] env[62383]: value = "task-2451920" [ 912.494967] env[62383]: _type = "Task" [ 912.494967] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.504326] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451920, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.608340] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d177d0aa-feba-4a1b-84b7-ad10cae7da56 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.629281] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fd1f94-ed84-415f-8fa9-bc049e626672 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.652841] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance '81921762-ac51-42d2-83dc-d5b6e904fbb7' progress to 67 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 912.666470] env[62383]: DEBUG nova.network.neutron [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Successfully created port: 8f856cf6-4638-4fe2-8094-e2856f5362aa {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.681271] env[62383]: DEBUG nova.scheduler.client.report [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 912.719109] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.746507] env[62383]: INFO nova.compute.manager [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Took 32.04 seconds to build instance. 
[ 912.754267] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "refresh_cache-2c93bdf1-aaf4-4e40-898a-634dc00d05e6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.754363] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquired lock "refresh_cache-2c93bdf1-aaf4-4e40-898a-634dc00d05e6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.754674] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Forcefully refreshing network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 913.009696] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 913.010246] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 913.012953] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a345bbcb-036b-484c-b414-c6c1e1c91237 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.019153] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.019463] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed121f71-8ce7-49bb-9d34-6adf1cecf986 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.094744] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 913.096043] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 913.096043] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleting the datastore file [datastore2] 23d24da6-c7d8-4d6a-8442-a1066505aab1 {{(pid=62383) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.096043] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7184ff60-9f3d-4dba-9b2a-262d411427ea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.103666] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 913.103666] env[62383]: value = "task-2451922" [ 913.103666] env[62383]: _type = "Task" [ 913.103666] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.114075] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.156133] env[62383]: DEBUG nova.network.neutron [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Successfully created port: 30736aa7-603a-46e9-8757-f52213094f87 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 913.159201] env[62383]: DEBUG nova.compute.manager [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 913.172553] env[62383]: DEBUG nova.network.neutron [-] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.192143] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.078s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.194421] env[62383]: DEBUG oslo_concurrency.lockutils [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.026s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.194663] env[62383]: DEBUG nova.objects.instance [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lazy-loading 'resources' on Instance uuid bc37e114-cf55-408b-9841-05eaf411b4f5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 913.204501] env[62383]: DEBUG nova.virt.hardware [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 913.204501] env[62383]: DEBUG nova.virt.hardware [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.204501] env[62383]: DEBUG nova.virt.hardware [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 913.204501] env[62383]: DEBUG nova.virt.hardware [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.204803] env[62383]: DEBUG nova.virt.hardware [None 
req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 913.204803] env[62383]: DEBUG nova.virt.hardware [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 913.204803] env[62383]: DEBUG nova.virt.hardware [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 913.204803] env[62383]: DEBUG nova.virt.hardware [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 913.204803] env[62383]: DEBUG nova.virt.hardware [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 913.204943] env[62383]: DEBUG nova.virt.hardware [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 913.204943] env[62383]: DEBUG nova.virt.hardware [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 913.205394] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085addf8-bdfd-4c42-b96c-fa55f16a4590 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.215220] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2f34da-8adf-4abc-bd6b-c613842558c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.231678] env[62383]: INFO nova.scheduler.client.report [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Deleted allocations for instance 40719661-5955-48ec-b289-b37896dd04df [ 913.246766] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c0d7188c-0185-4975-ba48-cc6be71bffba tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.566s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.277156] env[62383]: DEBUG nova.network.neutron [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Port 913ca293-96ad-478e-96f7-b0b1697a3b0d binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 913.620701] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141843} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.620701] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.620701] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 913.620701] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 913.675141] env[62383]: INFO nova.compute.manager [-] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Took 1.60 seconds to deallocate network for instance. 
[ 913.745783] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2fb2c429-3088-4ad1-a66c-492661c1e2a3 tempest-ServerTagsTestJSON-834356064 tempest-ServerTagsTestJSON-834356064-project-member] Lock "40719661-5955-48ec-b289-b37896dd04df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.083s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.809640] env[62383]: DEBUG nova.compute.manager [req-75628c1f-d8ca-46cc-9e87-075fafb8ce40 req-7985167d-7d09-482f-a38d-621fb1031f57 service nova] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Received event network-vif-deleted-e6fce712-4e28-4f58-ad16-497490c0dded {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 914.018219] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.018513] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.138190] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648a102b-2168-4f48-a97d-a39d1df5d82a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.147939] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ede5ac-5450-449d-9a24-8a99dedb4eec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.184373] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.185839] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Updating instance_info_cache with network_info: [{"id": "5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf", "address": "fa:16:3e:77:d2:90", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c1a084e-a9", "ovs_interfaceid": "5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.187478] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708aa108-9979-40b8-82f8-02021d0014cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.201485] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0938aac8-ff14-44f1-8eae-9e97519736f6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.217278] env[62383]: DEBUG nova.compute.provider_tree [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.309849] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "81921762-ac51-42d2-83dc-d5b6e904fbb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.310089] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.310260] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.522687] env[62383]: DEBUG nova.compute.manager [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 914.667476] env[62383]: DEBUG nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 914.668040] env[62383]: DEBUG nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 914.668040] env[62383]: DEBUG nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 914.668220] env[62383]: DEBUG nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 914.668434] env[62383]: DEBUG nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 914.668644] env[62383]: DEBUG nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 914.669089] env[62383]: DEBUG nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 914.669089] env[62383]: DEBUG nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 914.669524] env[62383]: DEBUG 
nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 914.669524] env[62383]: DEBUG nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 914.669695] env[62383]: DEBUG nova.virt.hardware [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 914.670749] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f13f59e-fdb9-4bb5-8f9c-0d5a54598908 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.684898] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d723d6-c175-4537-9062-f1ed496bfafb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.690816] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Releasing lock "refresh_cache-2c93bdf1-aaf4-4e40-898a-634dc00d05e6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.691158] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Updated the network info_cache for instance {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 914.701131] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 914.702029] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:e0:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '219a19a2-eb69-4683-81ac-a79596cb28f3', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 914.712234] env[62383]: DEBUG oslo.service.loopingcall [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.712581] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 914.712829] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 914.713040] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 914.713336] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ca546ae-2666-405b-9914-a7eedad20c33 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.731779] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 914.733030] env[62383]: DEBUG nova.scheduler.client.report [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 914.736728] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 914.737623] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 914.737807] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 914.738043] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 914.742694] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 914.742694] env[62383]: value = "task-2451923" [ 914.742694] env[62383]: _type = "Task" [ 914.742694] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.750903] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451923, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.974790] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquiring lock "f193af26-eba8-471f-a00e-0afa9b190d0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.974969] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Lock "f193af26-eba8-471f-a00e-0afa9b190d0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.054402] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.240720] env[62383]: DEBUG oslo_concurrency.lockutils [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.044s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.244123] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.215s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.248564] env[62383]: INFO nova.compute.claims [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.252054] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.267567] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451923, 'name': CreateVM_Task, 'duration_secs': 0.319617} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.267567] env[62383]: INFO nova.scheduler.client.report [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted allocations for instance bc37e114-cf55-408b-9841-05eaf411b4f5 [ 915.268066] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 915.268868] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.269126] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.269561] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 915.273239] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a190915-4671-4d97-9f94-e3a54f699b40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.283434] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 915.283434] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f931f8-7298-4c4e-5689-2c442035af43" [ 915.283434] env[62383]: _type = "Task" [ 915.283434] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.293456] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f931f8-7298-4c4e-5689-2c442035af43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.342365] env[62383]: DEBUG nova.network.neutron [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Successfully updated port: 88efd462-2836-4b8e-9deb-20be5e6eff71 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 915.409551] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.409777] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.410869] env[62383]: DEBUG nova.network.neutron [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.478352] env[62383]: DEBUG nova.compute.manager [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 915.782140] env[62383]: DEBUG oslo_concurrency.lockutils [None req-42304ecf-0343-4ba6-97b1-0abfc057f878 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "bc37e114-cf55-408b-9841-05eaf411b4f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.681s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.799189] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f931f8-7298-4c4e-5689-2c442035af43, 'name': SearchDatastore_Task, 'duration_secs': 0.010118} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.799491] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.799716] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 915.799960] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.800125] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.800308] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 915.800567] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bad821f4-e25b-462a-a250-759390385ea4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.810366] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 915.810559] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 915.811506] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0ab79c3-b4e4-4783-b718-9b4f02135734 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.817121] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 915.817121] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52faa926-c149-38b2-af09-6a5f7843f130" [ 915.817121] env[62383]: _type = "Task" [ 915.817121] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.824923] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52faa926-c149-38b2-af09-6a5f7843f130, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.852065] env[62383]: DEBUG nova.compute.manager [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received event network-vif-plugged-88efd462-2836-4b8e-9deb-20be5e6eff71 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 915.852065] env[62383]: DEBUG oslo_concurrency.lockutils [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] Acquiring lock "c117e858-696f-43dc-9182-70380214737f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.852065] env[62383]: DEBUG oslo_concurrency.lockutils [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] Lock "c117e858-696f-43dc-9182-70380214737f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.852065] env[62383]: DEBUG oslo_concurrency.lockutils [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] Lock "c117e858-696f-43dc-9182-70380214737f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.852065] env[62383]: DEBUG nova.compute.manager [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] [instance: c117e858-696f-43dc-9182-70380214737f] No waiting events found dispatching network-vif-plugged-88efd462-2836-4b8e-9deb-20be5e6eff71 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 915.852883] env[62383]: WARNING nova.compute.manager [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received unexpected event 
network-vif-plugged-88efd462-2836-4b8e-9deb-20be5e6eff71 for instance with vm_state building and task_state spawning. [ 915.852883] env[62383]: DEBUG nova.compute.manager [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received event network-changed-88efd462-2836-4b8e-9deb-20be5e6eff71 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 915.852883] env[62383]: DEBUG nova.compute.manager [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Refreshing instance network info cache due to event network-changed-88efd462-2836-4b8e-9deb-20be5e6eff71. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 915.852883] env[62383]: DEBUG oslo_concurrency.lockutils [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] Acquiring lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.852883] env[62383]: DEBUG oslo_concurrency.lockutils [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] Acquired lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.853242] env[62383]: DEBUG nova.network.neutron [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Refreshing network info cache for port 88efd462-2836-4b8e-9deb-20be5e6eff71 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.007510] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.238226] env[62383]: DEBUG nova.network.neutron [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance_info_cache with network_info: [{"id": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "address": "fa:16:3e:ee:69:75", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap913ca293-96", "ovs_interfaceid": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.330330] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52faa926-c149-38b2-af09-6a5f7843f130, 'name': SearchDatastore_Task, 'duration_secs': 0.008794} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.331139] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa35d50f-407a-4401-a1c4-4645ad218796 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.336801] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 916.336801] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5227d33d-4cc6-5e83-a123-925af0f2e483" [ 916.336801] env[62383]: _type = "Task" [ 916.336801] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.346610] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5227d33d-4cc6-5e83-a123-925af0f2e483, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.400468] env[62383]: DEBUG nova.network.neutron [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.542945] env[62383]: DEBUG nova.network.neutron [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.672192] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e29aaa-6cdf-4d9a-a441-ff0b76c81458 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.679893] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1fdefb-ca4d-4b8a-a71e-d00719051b46 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.711863] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298dc112-4244-4802-92e7-bb9fea2363d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.719408] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f16f814-a53d-4226-953b-9f04404063f6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.732808] env[62383]: DEBUG nova.compute.provider_tree [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.741044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.847181] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5227d33d-4cc6-5e83-a123-925af0f2e483, 'name': SearchDatastore_Task, 'duration_secs': 0.01011} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.847473] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.847905] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 23d24da6-c7d8-4d6a-8442-a1066505aab1/23d24da6-c7d8-4d6a-8442-a1066505aab1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 916.847992] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ea5e1d0-406b-4451-bbd7-52dd0957a6e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.854683] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 916.854683] env[62383]: value = "task-2451924" [ 916.854683] env[62383]: _type = "Task" [ 916.854683] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.862671] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451924, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.994026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.995061] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.045698] env[62383]: DEBUG oslo_concurrency.lockutils [req-033297af-c6fd-4810-ac7c-7902b35631ae req-5675c8be-43bc-4f4c-bb48-82f58874790f service nova] Releasing lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.238372] env[62383]: DEBUG nova.scheduler.client.report [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 917.265838] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c5bb03-b000-47ed-8b1f-0ec4d7e1ac36 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.285701] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6d1729-b6b1-435b-ab54-7370be1e7166 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.293463] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance '81921762-ac51-42d2-83dc-d5b6e904fbb7' progress to 83 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 917.364080] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458427} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.364344] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 23d24da6-c7d8-4d6a-8442-a1066505aab1/23d24da6-c7d8-4d6a-8442-a1066505aab1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.364557] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.364811] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21430695-5ed2-4d52-adb4-6257f89c3a09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.371169] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 917.371169] env[62383]: value = "task-2451925" [ 917.371169] env[62383]: _type = "Task" [ 917.371169] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.380213] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451925, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.497294] env[62383]: DEBUG nova.compute.manager [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 917.610406] env[62383]: DEBUG nova.compute.manager [req-06673992-812f-40e6-8c88-b6102c3d3c8e req-0c421d9d-53c0-47bc-931a-985ad662fbae service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received event network-vif-plugged-8f856cf6-4638-4fe2-8094-e2856f5362aa {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 917.610628] env[62383]: DEBUG oslo_concurrency.lockutils [req-06673992-812f-40e6-8c88-b6102c3d3c8e req-0c421d9d-53c0-47bc-931a-985ad662fbae service nova] Acquiring lock "c117e858-696f-43dc-9182-70380214737f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.610839] env[62383]: DEBUG oslo_concurrency.lockutils [req-06673992-812f-40e6-8c88-b6102c3d3c8e req-0c421d9d-53c0-47bc-931a-985ad662fbae service nova] Lock "c117e858-696f-43dc-9182-70380214737f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.611015] env[62383]: DEBUG oslo_concurrency.lockutils [req-06673992-812f-40e6-8c88-b6102c3d3c8e req-0c421d9d-53c0-47bc-931a-985ad662fbae service nova] Lock "c117e858-696f-43dc-9182-70380214737f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.611363] env[62383]: DEBUG nova.compute.manager [req-06673992-812f-40e6-8c88-b6102c3d3c8e req-0c421d9d-53c0-47bc-931a-985ad662fbae service nova] [instance: c117e858-696f-43dc-9182-70380214737f] No waiting events found dispatching network-vif-plugged-8f856cf6-4638-4fe2-8094-e2856f5362aa {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 917.611463] env[62383]: WARNING nova.compute.manager [req-06673992-812f-40e6-8c88-b6102c3d3c8e req-0c421d9d-53c0-47bc-931a-985ad662fbae service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received unexpected event network-vif-plugged-8f856cf6-4638-4fe2-8094-e2856f5362aa for instance with vm_state building and task_state spawning. [ 917.742530] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.743008] env[62383]: DEBUG nova.compute.manager [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 917.746010] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.057s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.749306] env[62383]: DEBUG nova.objects.instance [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lazy-loading 'resources' on Instance uuid 506afe7c-f19b-4417-b097-485c0244a019 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.799958] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 917.800321] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77612953-31ab-4790-a5b1-574858b161b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.813230] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 917.813230] env[62383]: value = "task-2451926" [ 917.813230] env[62383]: _type = "Task" [ 917.813230] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.815199] env[62383]: DEBUG nova.network.neutron [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Successfully updated port: 8f856cf6-4638-4fe2-8094-e2856f5362aa {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 917.821459] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451926, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.883245] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451925, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068197} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.883611] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.884798] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfe1f9b-4602-4346-a012-420b651167dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.918338] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 23d24da6-c7d8-4d6a-8442-a1066505aab1/23d24da6-c7d8-4d6a-8442-a1066505aab1.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.918765] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d42687c8-dc5a-46cf-8966-abec59be04ba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.944058] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 917.944058] env[62383]: value = "task-2451927" [ 917.944058] env[62383]: _type = "Task" [ 917.944058] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.951999] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451927, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.021518] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.249012] env[62383]: DEBUG nova.compute.utils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 918.254500] env[62383]: DEBUG nova.compute.manager [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 918.254500] env[62383]: DEBUG nova.network.neutron [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 918.330476] env[62383]: DEBUG oslo_vmware.api [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451926, 'name': PowerOnVM_Task, 'duration_secs': 0.402263} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.330740] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.330928] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0cd68c2d-4db6-4f0d-b417-2eb299cbb0c9 tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance '81921762-ac51-42d2-83dc-d5b6e904fbb7' progress to 100 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 918.351832] env[62383]: DEBUG nova.policy [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c294a0cc4e6446afabfb754ba2437a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83304cfb5deb443880252c194e249565', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 918.454678] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451927, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.662919] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e17f2a-bec0-48ea-be09-a9a1a89b55c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.671023] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aadeb6cb-3580-433f-b9ff-0f7964a2796c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.702471] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc8e74c-f6dd-4abf-9a9f-9cdda4ed5e4c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.709953] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537bc5a3-d56b-44e8-978b-a408723e6ab2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.722637] env[62383]: DEBUG nova.compute.provider_tree [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.755177] env[62383]: DEBUG nova.compute.manager [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 918.956777] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451927, 'name': ReconfigVM_Task, 'duration_secs': 0.578864} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.957125] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 23d24da6-c7d8-4d6a-8442-a1066505aab1/23d24da6-c7d8-4d6a-8442-a1066505aab1.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.957709] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63a3d1f6-fff7-48fb-a866-d4c04ea2970e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.964192] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 918.964192] env[62383]: value = "task-2451928" [ 918.964192] env[62383]: _type = "Task" [ 918.964192] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.972159] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451928, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.221040] env[62383]: DEBUG nova.network.neutron [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Successfully created port: b34b897a-3f37-4846-a7e9-0c248d1ecaf9 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.226413] env[62383]: DEBUG nova.scheduler.client.report [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 919.477396] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451928, 'name': Rename_Task, 'duration_secs': 0.132428} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.477730] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 919.477979] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa2ddbca-556d-4a45-9f45-e40ec7056892 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.484454] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 919.484454] env[62383]: value = "task-2451929" [ 919.484454] env[62383]: _type = "Task" [ 919.484454] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.492570] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451929, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.730890] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.985s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.733461] env[62383]: DEBUG oslo_concurrency.lockutils [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.360s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.733692] env[62383]: DEBUG nova.objects.instance [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lazy-loading 'resources' on Instance uuid 4d58d2e6-171d-4346-b281-bcbd22286623 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.756505] env[62383]: INFO nova.scheduler.client.report [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted allocations for instance 506afe7c-f19b-4417-b097-485c0244a019 [ 919.765441] env[62383]: DEBUG nova.compute.manager [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 919.802256] env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 919.802514] env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 919.802671] env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 919.802878] env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 919.803054] env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 919.803238] env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 919.803529] env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 919.803594] env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 919.804929] 
env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 919.804929] env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 919.804929] env[62383]: DEBUG nova.virt.hardware [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 919.805407] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8232e1c-2f15-4525-8ec8-a63bff871c32 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.816525] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ed512a-952f-4006-94be-6db0d0699615 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.824587] env[62383]: DEBUG nova.compute.manager [req-3ef11c5c-da3d-47b6-a758-35071d345054 req-6f55c7b7-54ee-4fe2-9302-ce1222f2abda service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received event network-changed-8f856cf6-4638-4fe2-8094-e2856f5362aa {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 919.824587] env[62383]: DEBUG nova.compute.manager [req-3ef11c5c-da3d-47b6-a758-35071d345054 req-6f55c7b7-54ee-4fe2-9302-ce1222f2abda service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Refreshing instance network info cache due to event network-changed-8f856cf6-4638-4fe2-8094-e2856f5362aa. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 919.824587] env[62383]: DEBUG oslo_concurrency.lockutils [req-3ef11c5c-da3d-47b6-a758-35071d345054 req-6f55c7b7-54ee-4fe2-9302-ce1222f2abda service nova] Acquiring lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.824587] env[62383]: DEBUG oslo_concurrency.lockutils [req-3ef11c5c-da3d-47b6-a758-35071d345054 req-6f55c7b7-54ee-4fe2-9302-ce1222f2abda service nova] Acquired lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.824587] env[62383]: DEBUG nova.network.neutron [req-3ef11c5c-da3d-47b6-a758-35071d345054 req-6f55c7b7-54ee-4fe2-9302-ce1222f2abda service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Refreshing network info cache for port 8f856cf6-4638-4fe2-8094-e2856f5362aa {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 919.863619] env[62383]: DEBUG nova.compute.manager [req-5a32d00b-5913-42c2-b90b-70cba60ef0d8 req-378f29ef-b743-4e47-a95a-d0c8fecb0026 service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received event network-vif-plugged-30736aa7-603a-46e9-8757-f52213094f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 919.863619] env[62383]: DEBUG oslo_concurrency.lockutils [req-5a32d00b-5913-42c2-b90b-70cba60ef0d8 req-378f29ef-b743-4e47-a95a-d0c8fecb0026 service nova] Acquiring lock "c117e858-696f-43dc-9182-70380214737f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.863619] env[62383]: DEBUG oslo_concurrency.lockutils [req-5a32d00b-5913-42c2-b90b-70cba60ef0d8 req-378f29ef-b743-4e47-a95a-d0c8fecb0026 service nova] Lock "c117e858-696f-43dc-9182-70380214737f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.863619] env[62383]: DEBUG oslo_concurrency.lockutils [req-5a32d00b-5913-42c2-b90b-70cba60ef0d8 req-378f29ef-b743-4e47-a95a-d0c8fecb0026 service nova] Lock "c117e858-696f-43dc-9182-70380214737f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.863619] env[62383]: DEBUG nova.compute.manager [req-5a32d00b-5913-42c2-b90b-70cba60ef0d8 req-378f29ef-b743-4e47-a95a-d0c8fecb0026 service nova] [instance: c117e858-696f-43dc-9182-70380214737f] No waiting events found dispatching network-vif-plugged-30736aa7-603a-46e9-8757-f52213094f87 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 919.863809] env[62383]: WARNING nova.compute.manager [req-5a32d00b-5913-42c2-b90b-70cba60ef0d8 req-378f29ef-b743-4e47-a95a-d0c8fecb0026 service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received unexpected event network-vif-plugged-30736aa7-603a-46e9-8757-f52213094f87 for instance with vm_state building and task_state spawning. 
[ 919.888938] env[62383]: DEBUG nova.network.neutron [req-3ef11c5c-da3d-47b6-a758-35071d345054 req-6f55c7b7-54ee-4fe2-9302-ce1222f2abda service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 919.981691] env[62383]: DEBUG nova.network.neutron [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Successfully updated port: 30736aa7-603a-46e9-8757-f52213094f87 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 920.003566] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451929, 'name': PowerOnVM_Task, 'duration_secs': 0.477223} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.004096] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.004414] env[62383]: DEBUG nova.compute.manager [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 920.006221] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2d9f3c-a26e-4fa8-a0f3-6fb0383eb4d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.030117] env[62383]: DEBUG nova.network.neutron [req-3ef11c5c-da3d-47b6-a758-35071d345054 req-6f55c7b7-54ee-4fe2-9302-ce1222f2abda service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.264086] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c2555409-7235-4908-80ec-a73a6fa36a9d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "506afe7c-f19b-4417-b097-485c0244a019" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.204s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.309597] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "81921762-ac51-42d2-83dc-d5b6e904fbb7" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.309874] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock 
"81921762-ac51-42d2-83dc-d5b6e904fbb7" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.310213] env[62383]: DEBUG nova.compute.manager [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Going to confirm migration 5 {{(pid=62383) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 920.486143] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.520393] env[62383]: INFO nova.compute.manager [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] bringing vm to original state: 'stopped' [ 920.534526] env[62383]: DEBUG oslo_concurrency.lockutils [req-3ef11c5c-da3d-47b6-a758-35071d345054 req-6f55c7b7-54ee-4fe2-9302-ce1222f2abda service nova] Releasing lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.534936] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquired lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.535129] env[62383]: DEBUG nova.network.neutron [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.648631] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78afec32-764c-4b40-8ec3-34c2d86f8396 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.653939] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.654232] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.654479] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.654628] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.654808] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.658806] env[62383]: INFO nova.compute.manager [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Terminating instance [ 920.660865] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f73c26-4990-4088-aec4-51b3f964aff1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.707519] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f328fd3-e256-41e4-9a26-5a8527b3f273 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.716486] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8829af0f-e688-4617-a103-bfdeafaa479e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.731818] env[62383]: DEBUG nova.compute.provider_tree [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.922764] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 920.923515] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.923515] env[62383]: DEBUG nova.network.neutron [None 
req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.923515] env[62383]: DEBUG nova.objects.instance [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lazy-loading 'info_cache' on Instance uuid 81921762-ac51-42d2-83dc-d5b6e904fbb7 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.073900] env[62383]: DEBUG nova.network.neutron [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 921.083272] env[62383]: DEBUG nova.network.neutron [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Successfully updated port: b34b897a-3f37-4846-a7e9-0c248d1ecaf9 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 921.170527] env[62383]: DEBUG nova.compute.manager [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 921.170819] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.171968] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f01b47b-870e-4b4f-b4c1-6b5a22aa9975 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.181740] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.182042] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d27ff996-fb4e-435a-96df-7356fe637fe2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.189556] env[62383]: DEBUG oslo_vmware.api [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 921.189556] env[62383]: value = "task-2451930" [ 921.189556] env[62383]: _type = "Task" [ 921.189556] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.200102] env[62383]: DEBUG oslo_vmware.api [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.234929] env[62383]: DEBUG nova.scheduler.client.report [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 921.242891] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.243204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.507763] env[62383]: DEBUG nova.network.neutron [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Updating instance_info_cache with network_info: [{"id": "88efd462-2836-4b8e-9deb-20be5e6eff71", "address": "fa:16:3e:c4:11:77", "network": {"id": "77f03e6f-d6e8-40ae-bc84-b24afda39ec7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1432825120", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88efd462-28", "ovs_interfaceid": "88efd462-2836-4b8e-9deb-20be5e6eff71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}, {"id": "8f856cf6-4638-4fe2-8094-e2856f5362aa", "address": "fa:16:3e:91:93:7c", "network": {"id": "7b22b41a-9325-4225-b261-930495417b57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2134908736", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f856cf6-46", "ovs_interfaceid": "8f856cf6-4638-4fe2-8094-e2856f5362aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30736aa7-603a-46e9-8757-f52213094f87", "address": "fa:16:3e:4e:af:ee", "network": {"id": "77f03e6f-d6e8-40ae-bc84-b24afda39ec7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1432825120", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30736aa7-60", "ovs_interfaceid": "30736aa7-603a-46e9-8757-f52213094f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.527181] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.527552] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.527740] env[62383]: DEBUG nova.compute.manager [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] 
[instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 921.528654] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00f9875-ccd5-4b6b-bbe4-6afb1a457f8f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.535913] env[62383]: DEBUG nova.compute.manager [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 921.586852] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "refresh_cache-eb632e2d-b71e-446d-83a2-0bab1d823d27" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.587015] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "refresh_cache-eb632e2d-b71e-446d-83a2-0bab1d823d27" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.587185] env[62383]: DEBUG nova.network.neutron [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.699765] env[62383]: DEBUG oslo_vmware.api [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451930, 'name': PowerOffVM_Task, 'duration_secs': 0.331608} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.699765] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.699904] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 921.702159] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e248bd31-674c-4a2f-880c-557a5a7b9f82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.740693] env[62383]: DEBUG oslo_concurrency.lockutils [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.007s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.743906] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.305s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.743906] env[62383]: DEBUG nova.objects.instance [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lazy-loading 'resources' on Instance uuid b9669bb8-680f-492a-a7c6-82e6edb0a8ed {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.748391] env[62383]: DEBUG nova.compute.manager [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 921.758667] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 921.758893] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 921.759106] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleting the datastore file [datastore2] 7740a70f-3c95-49aa-b3ec-0e0effd3efcc {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.759356] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c64bd371-630f-43c6-8ef6-f4a8f98a8c79 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.768928] env[62383]: DEBUG oslo_vmware.api [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 921.768928] env[62383]: value = "task-2451932" [ 921.768928] env[62383]: _type = "Task" [ 921.768928] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.774914] env[62383]: INFO nova.scheduler.client.report [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleted allocations for instance 4d58d2e6-171d-4346-b281-bcbd22286623 [ 921.784019] env[62383]: DEBUG oslo_vmware.api [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451932, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.916980] env[62383]: DEBUG nova.compute.manager [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Received event network-vif-plugged-b34b897a-3f37-4846-a7e9-0c248d1ecaf9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 921.917231] env[62383]: DEBUG oslo_concurrency.lockutils [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] Acquiring lock "eb632e2d-b71e-446d-83a2-0bab1d823d27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.917438] env[62383]: DEBUG oslo_concurrency.lockutils [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] Lock "eb632e2d-b71e-446d-83a2-0bab1d823d27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 921.917637] env[62383]: DEBUG oslo_concurrency.lockutils [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] Lock "eb632e2d-b71e-446d-83a2-0bab1d823d27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.917775] env[62383]: DEBUG nova.compute.manager [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] No waiting events found dispatching network-vif-plugged-b34b897a-3f37-4846-a7e9-0c248d1ecaf9 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 921.917949] env[62383]: WARNING nova.compute.manager [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Received unexpected event network-vif-plugged-b34b897a-3f37-4846-a7e9-0c248d1ecaf9 for instance with vm_state building and task_state spawning. [ 921.918235] env[62383]: DEBUG nova.compute.manager [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Received event network-changed-b34b897a-3f37-4846-a7e9-0c248d1ecaf9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 921.918304] env[62383]: DEBUG nova.compute.manager [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Refreshing instance network info cache due to event network-changed-b34b897a-3f37-4846-a7e9-0c248d1ecaf9. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 921.918472] env[62383]: DEBUG oslo_concurrency.lockutils [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] Acquiring lock "refresh_cache-eb632e2d-b71e-446d-83a2-0bab1d823d27" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.934575] env[62383]: DEBUG nova.compute.manager [req-dfcdb0c7-a92a-47b9-8dde-69171718d8c1 req-f33bc02d-fa39-48f9-bbf4-38d0864345ba service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received event network-changed-30736aa7-603a-46e9-8757-f52213094f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 921.934575] env[62383]: DEBUG nova.compute.manager [req-dfcdb0c7-a92a-47b9-8dde-69171718d8c1 req-f33bc02d-fa39-48f9-bbf4-38d0864345ba service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Refreshing instance network info cache due to event network-changed-30736aa7-603a-46e9-8757-f52213094f87. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 921.934689] env[62383]: DEBUG oslo_concurrency.lockutils [req-dfcdb0c7-a92a-47b9-8dde-69171718d8c1 req-f33bc02d-fa39-48f9-bbf4-38d0864345ba service nova] Acquiring lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.010386] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Releasing lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.010767] env[62383]: DEBUG nova.compute.manager [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Instance network_info: |[{"id": "88efd462-2836-4b8e-9deb-20be5e6eff71", "address": "fa:16:3e:c4:11:77", "network": {"id": "77f03e6f-d6e8-40ae-bc84-b24afda39ec7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1432825120", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88efd462-28", "ovs_interfaceid": "88efd462-2836-4b8e-9deb-20be5e6eff71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8f856cf6-4638-4fe2-8094-e2856f5362aa", "address": "fa:16:3e:91:93:7c", "network": {"id": "7b22b41a-9325-4225-b261-930495417b57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2134908736", "subnets": [{"cidr": "192.168.129.0/24", "dns": 
[], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f856cf6-46", "ovs_interfaceid": "8f856cf6-4638-4fe2-8094-e2856f5362aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30736aa7-603a-46e9-8757-f52213094f87", "address": "fa:16:3e:4e:af:ee", "network": {"id": "77f03e6f-d6e8-40ae-bc84-b24afda39ec7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1432825120", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30736aa7-60", "ovs_interfaceid": "30736aa7-603a-46e9-8757-f52213094f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 922.011078] env[62383]: DEBUG oslo_concurrency.lockutils [req-dfcdb0c7-a92a-47b9-8dde-69171718d8c1 req-f33bc02d-fa39-48f9-bbf4-38d0864345ba service nova] Acquired lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.011261] env[62383]: DEBUG nova.network.neutron [req-dfcdb0c7-a92a-47b9-8dde-69171718d8c1 req-f33bc02d-fa39-48f9-bbf4-38d0864345ba service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Refreshing network info cache for port 30736aa7-603a-46e9-8757-f52213094f87 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 922.014647] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:11:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '88efd462-2836-4b8e-9deb-20be5e6eff71', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:93:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'9b911797-478d-4ee5-bce9-6f2f49014e94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f856cf6-4638-4fe2-8094-e2856f5362aa', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:af:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30736aa7-603a-46e9-8757-f52213094f87', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 922.030866] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Creating folder: Project (e1401660f8f64c72be5f9ea6a0960ce3). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 922.034729] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84fca500-d284-4965-bdc5-0c338b5ae5a3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.039659] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 922.040399] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29390845-cc37-459b-b4d7-9dff33589c63 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.045304] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Created folder: Project (e1401660f8f64c72be5f9ea6a0960ce3) in parent group-v496304. [ 922.048017] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Creating folder: Instances. Parent ref: group-v496524. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 922.048017] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0fd597b-2f41-4439-afa0-36ed44be482b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.049330] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 922.049330] env[62383]: value = "task-2451934" [ 922.049330] env[62383]: _type = "Task" [ 922.049330] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.059087] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451934, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.061929] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Created folder: Instances in parent group-v496524. [ 922.061929] env[62383]: DEBUG oslo.service.loopingcall [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.061929] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c117e858-696f-43dc-9182-70380214737f] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 922.061929] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a2b6f71-7fe1-40d9-a2fa-e884458db665 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.090295] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 922.090295] env[62383]: value = "task-2451936" [ 922.090295] env[62383]: _type = "Task" [ 922.090295] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.099802] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451936, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.135941] env[62383]: DEBUG nova.network.neutron [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 922.269287] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.269698] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "2f028680-8db4-474a-8f24-880c4702877b" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.269988] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.270266] env[62383]: INFO nova.compute.manager [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Rebooting instance [ 922.275550] env[62383]: DEBUG nova.network.neutron [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance_info_cache with network_info: [{"id": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "address": "fa:16:3e:ee:69:75", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap913ca293-96", "ovs_interfaceid": "913ca293-96ad-478e-96f7-b0b1697a3b0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.287297] env[62383]: DEBUG oslo_concurrency.lockutils [None req-820c16ff-0182-4bd7-9ca3-8ecc371976fa tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "4d58d2e6-171d-4346-b281-bcbd22286623" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 
15.676s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.291421] env[62383]: DEBUG oslo_vmware.api [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.39967} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.291993] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.292227] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.292555] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.292822] env[62383]: INFO nova.compute.manager [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Took 1.12 seconds to destroy the instance on the hypervisor. [ 922.293161] env[62383]: DEBUG oslo.service.loopingcall [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.296762] env[62383]: DEBUG nova.compute.manager [-] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 922.296762] env[62383]: DEBUG nova.network.neutron [-] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.350886] env[62383]: DEBUG nova.network.neutron [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Updating instance_info_cache with network_info: [{"id": "b34b897a-3f37-4846-a7e9-0c248d1ecaf9", "address": "fa:16:3e:93:f9:c4", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb34b897a-3f", "ovs_interfaceid": "b34b897a-3f37-4846-a7e9-0c248d1ecaf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.563513] env[62383]: DEBUG oslo_vmware.api [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451934, 'name': PowerOffVM_Task, 'duration_secs': 0.199141} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.563792] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.564021] env[62383]: DEBUG nova.compute.manager [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.564821] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1c6b1f-15d1-42c7-901a-b3ea53f9858b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.604634] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451936, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.653096] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82511f3-3703-4906-aeb4-6d020be3a0b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.662816] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeab0080-7cd6-4dbb-b0a5-d31208188c6d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.700012] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34025ef-a534-42df-95b7-819bed24a745 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.708591] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74aa0ba-e192-4185-ae8e-dd394c2015fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.724238] env[62383]: DEBUG nova.compute.provider_tree [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.784308] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-81921762-ac51-42d2-83dc-d5b6e904fbb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.784564] env[62383]: DEBUG nova.objects.instance [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lazy-loading 'migration_context' on Instance uuid 81921762-ac51-42d2-83dc-d5b6e904fbb7 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 922.804020] env[62383]: DEBUG 
oslo_concurrency.lockutils [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 922.804020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.804020] env[62383]: DEBUG nova.network.neutron [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 922.831517] env[62383]: DEBUG nova.network.neutron [req-dfcdb0c7-a92a-47b9-8dde-69171718d8c1 req-f33bc02d-fa39-48f9-bbf4-38d0864345ba service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Updated VIF entry in instance network info cache for port 30736aa7-603a-46e9-8757-f52213094f87. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.831941] env[62383]: DEBUG nova.network.neutron [req-dfcdb0c7-a92a-47b9-8dde-69171718d8c1 req-f33bc02d-fa39-48f9-bbf4-38d0864345ba service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Updating instance_info_cache with network_info: [{"id": "88efd462-2836-4b8e-9deb-20be5e6eff71", "address": "fa:16:3e:c4:11:77", "network": {"id": "77f03e6f-d6e8-40ae-bc84-b24afda39ec7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1432825120", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88efd462-28", "ovs_interfaceid": "88efd462-2836-4b8e-9deb-20be5e6eff71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8f856cf6-4638-4fe2-8094-e2856f5362aa", "address": "fa:16:3e:91:93:7c", "network": {"id": "7b22b41a-9325-4225-b261-930495417b57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2134908736", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.43", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b911797-478d-4ee5-bce9-6f2f49014e94", "external-id": "nsx-vlan-transportzone-70", "segmentation_id": 70, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f856cf6-46", "ovs_interfaceid": "8f856cf6-4638-4fe2-8094-e2856f5362aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30736aa7-603a-46e9-8757-f52213094f87", "address": "fa:16:3e:4e:af:ee", "network": {"id": "77f03e6f-d6e8-40ae-bc84-b24afda39ec7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1432825120", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30736aa7-60", "ovs_interfaceid": "30736aa7-603a-46e9-8757-f52213094f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.852536] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "refresh_cache-eb632e2d-b71e-446d-83a2-0bab1d823d27" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.852857] env[62383]: DEBUG nova.compute.manager [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Instance network_info: |[{"id": "b34b897a-3f37-4846-a7e9-0c248d1ecaf9", "address": "fa:16:3e:93:f9:c4", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb34b897a-3f", "ovs_interfaceid": "b34b897a-3f37-4846-a7e9-0c248d1ecaf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 922.853181] env[62383]: DEBUG oslo_concurrency.lockutils [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] Acquired lock "refresh_cache-eb632e2d-b71e-446d-83a2-0bab1d823d27" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.853343] env[62383]: DEBUG nova.network.neutron [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Refreshing network info cache for port b34b897a-3f37-4846-a7e9-0c248d1ecaf9 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 922.854733] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:f9:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '01fe2e08-46f6-4cee-aefd-934461f8077d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b34b897a-3f37-4846-a7e9-0c248d1ecaf9', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 922.862020] env[62383]: DEBUG oslo.service.loopingcall [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.865230] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 922.865671] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a20337d2-3f58-476b-a365-12fd26f64d69 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.886771] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 922.886771] env[62383]: value = "task-2451937" [ 922.886771] env[62383]: _type = "Task" [ 922.886771] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.894868] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451937, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.083130] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.555s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.096161] env[62383]: DEBUG nova.network.neutron [-] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.105584] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451936, 'name': CreateVM_Task, 'duration_secs': 0.529348} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.105754] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c117e858-696f-43dc-9182-70380214737f] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 923.109205] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.109370] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.110776] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 923.110776] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32044d72-bfc4-4c2f-b9d5-b4921d038a3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.117829] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 923.117829] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e1ec32-cc3d-b81c-19ae-aa36aa208e63" [ 923.117829] env[62383]: _type = "Task" [ 923.117829] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.130279] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e1ec32-cc3d-b81c-19ae-aa36aa208e63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.227320] env[62383]: DEBUG nova.scheduler.client.report [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 923.233073] env[62383]: DEBUG nova.network.neutron [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Updated VIF entry in instance network info cache for port b34b897a-3f37-4846-a7e9-0c248d1ecaf9. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 923.233441] env[62383]: DEBUG nova.network.neutron [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Updating instance_info_cache with network_info: [{"id": "b34b897a-3f37-4846-a7e9-0c248d1ecaf9", "address": "fa:16:3e:93:f9:c4", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb34b897a-3f", "ovs_interfaceid": "b34b897a-3f37-4846-a7e9-0c248d1ecaf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.290627] env[62383]: DEBUG nova.objects.base [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Object Instance<81921762-ac51-42d2-83dc-d5b6e904fbb7> lazy-loaded attributes: info_cache,migration_context {{(pid=62383) wrapper 
/opt/stack/nova/nova/objects/base.py:136}} [ 923.291616] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1738f7a5-19a7-4d01-93a1-c639b6151e9e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.315308] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5861380-5cdf-4063-a68f-41982a366bcd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.321102] env[62383]: DEBUG oslo_vmware.api [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 923.321102] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c67ae0-8bd9-1aa0-1653-1393d2a949b0" [ 923.321102] env[62383]: _type = "Task" [ 923.321102] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.328972] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Acquiring lock "d0311c29-e1ed-446f-a52b-1687b9561740" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.329339] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lock "d0311c29-e1ed-446f-a52b-1687b9561740" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.329447] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Acquiring lock "d0311c29-e1ed-446f-a52b-1687b9561740-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.329607] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lock "d0311c29-e1ed-446f-a52b-1687b9561740-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.329773] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lock "d0311c29-e1ed-446f-a52b-1687b9561740-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.331325] env[62383]: DEBUG oslo_vmware.api [None req-abecc734-e736-4f5c-b251-c3c30ce2392b 
tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c67ae0-8bd9-1aa0-1653-1393d2a949b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.331761] env[62383]: INFO nova.compute.manager [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Terminating instance [ 923.334402] env[62383]: DEBUG oslo_concurrency.lockutils [req-dfcdb0c7-a92a-47b9-8dde-69171718d8c1 req-f33bc02d-fa39-48f9-bbf4-38d0864345ba service nova] Releasing lock "refresh_cache-c117e858-696f-43dc-9182-70380214737f" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.399677] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451937, 'name': CreateVM_Task, 'duration_secs': 0.358082} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.400147] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 923.401164] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.403152] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.403561] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 923.403847] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43a12402-72e0-43b2-ac65-7d5ebea91b1d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.409303] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 923.409303] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e1446f-a36f-c730-edc1-fec352a26923" [ 923.409303] env[62383]: _type = "Task" [ 923.409303] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.121653] env[62383]: INFO nova.compute.manager [-] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Took 1.83 seconds to deallocate network for instance. [ 924.122228] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.379s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.124481] env[62383]: DEBUG oslo_concurrency.lockutils [req-8970b0b0-6894-4574-8242-e1f9d9222dec req-4b84c583-f06d-4730-9371-07b4f79ba2d9 service nova] Releasing lock "refresh_cache-eb632e2d-b71e-446d-83a2-0bab1d823d27" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.125173] env[62383]: DEBUG nova.compute.manager [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 924.125385] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.127242] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e1446f-a36f-c730-edc1-fec352a26923, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.127425] env[62383]: WARNING oslo_vmware.common.loopingcall [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] task run outlasted interval by 0.21790500000000002 sec [ 924.135562] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.417s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.135777] env[62383]: DEBUG nova.objects.instance [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lazy-loading 'resources' on Instance uuid 0f6b7094-27a0-4e97-98ac-bff857124b6c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.139758] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62b01b25-609b-44f6-9667-c8fb9bc86eb2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.143144] env[62383]: DEBUG nova.compute.manager [req-f5b015e2-7825-4078-9365-3d2c9b57f1ae req-f7438793-155f-49b3-95b4-744c5738c3b4 service nova] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Received event network-vif-deleted-2ff20743-555a-49bd-964f-be249744a686 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 924.144342] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.159322] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e1ec32-cc3d-b81c-19ae-aa36aa208e63, 'name': SearchDatastore_Task, 'duration_secs': 0.010586} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.170025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.170025] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 924.170025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.170025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.170025] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 924.170025] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e1446f-a36f-c730-edc1-fec352a26923, 'name': SearchDatastore_Task, 'duration_secs': 0.010074} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.170025] env[62383]: DEBUG oslo_vmware.api [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c67ae0-8bd9-1aa0-1653-1393d2a949b0, 'name': SearchDatastore_Task, 'duration_secs': 0.007892} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.170025] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for the task: (returnval){ [ 924.170025] env[62383]: value = "task-2451938" [ 924.170025] env[62383]: _type = "Task" [ 924.170025] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.170899] env[62383]: INFO nova.scheduler.client.report [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleted allocations for instance b9669bb8-680f-492a-a7c6-82e6edb0a8ed [ 924.171783] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed01fd2b-022d-40d1-be40-98f3674374e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.174646] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.174646] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 924.174646] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.174646] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.174646] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 924.174966] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.179899] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85dcb1cc-d645-4e06-af13-343978de9ccb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.191711] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 
tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451938, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.194232] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 924.194409] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 924.195230] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 924.195402] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 924.198558] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58f04397-fa9c-47e3-8c29-a6f7e9f8eef5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.200784] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0494b26b-d931-49f4-863e-d4269e6f9efd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.209120] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 924.209120] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e0fa89-15df-8e6d-1104-868a9100ec39" [ 924.209120] env[62383]: _type = "Task" [ 924.209120] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.210772] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 924.210772] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522c4f46-9a2f-d47d-a9ba-dfcbbba194f6" [ 924.210772] env[62383]: _type = "Task" [ 924.210772] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.222781] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e0fa89-15df-8e6d-1104-868a9100ec39, 'name': SearchDatastore_Task, 'duration_secs': 0.009611} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.230032] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522c4f46-9a2f-d47d-a9ba-dfcbbba194f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009365} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.230032] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5b5b6b6-abb6-4248-b343-b17e5f3ae728 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.232945] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c455b61-4734-488a-81b9-842f1cb1f9b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.238935] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 924.238935] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527a3361-fea8-b1fd-68cb-070a3df90288" [ 924.238935] env[62383]: _type = "Task" [ 924.238935] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.240510] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 924.240510] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c46b07-fe01-3009-5dd0-622b8c822657" [ 924.240510] env[62383]: _type = "Task" [ 924.240510] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.256325] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527a3361-fea8-b1fd-68cb-070a3df90288, 'name': SearchDatastore_Task, 'duration_secs': 0.009334} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.259995] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.260323] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] eb632e2d-b71e-446d-83a2-0bab1d823d27/eb632e2d-b71e-446d-83a2-0bab1d823d27.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 924.260807] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c46b07-fe01-3009-5dd0-622b8c822657, 'name': SearchDatastore_Task, 'duration_secs': 0.009461} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.260878] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2821dd6-1778-4e28-9a2b-f230c522b056 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.262787] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.263026] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] c117e858-696f-43dc-9182-70380214737f/c117e858-696f-43dc-9182-70380214737f.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 924.265555] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1702d066-10d0-437e-94f5-80a4434d8594 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.273895] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 924.273895] env[62383]: value = "task-2451940" [ 924.273895] env[62383]: _type = "Task" [ 924.273895] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.275103] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 924.275103] env[62383]: value = "task-2451939" [ 924.275103] env[62383]: _type = "Task" [ 924.275103] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.288367] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451940, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.292982] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.435159] env[62383]: DEBUG nova.network.neutron [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [{"id": "5ba29557-a079-4404-9449-eeff24a0a3e4", "address": "fa:16:3e:fd:2c:1c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ba29557-a0", "ovs_interfaceid": "5ba29557-a079-4404-9449-eeff24a0a3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.571670] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe69369-a809-4eb9-a788-0a44ca40abda {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.580603] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d889378-ca8d-4164-9337-6a67af817337 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.615310] env[62383]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36ff6d3-30f0-4975-80e2-6140fca772a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.624087] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3b32ba-6da6-45f6-90b0-2b24eaf5ce07 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.640812] env[62383]: DEBUG nova.compute.provider_tree [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.650420] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.650691] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.650904] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "23d24da6-c7d8-4d6a-8442-a1066505aab1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.651112] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.651291] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.654860] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.656685] env[62383]: INFO nova.compute.manager [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Terminating instance [ 924.685271] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451938, 'name': PowerOffVM_Task, 'duration_secs': 0.214857} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.685563] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.685771] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 924.685956] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496409', 'volume_id': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'name': 'volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd0311c29-e1ed-446f-a52b-1687b9561740', 'attached_at': '', 'detached_at': '', 'volume_id': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'serial': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 924.689050] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebbf420-011a-4ed2-b5fc-fe09eb4827c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.693204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11aa9134-e2d7-44e0-a43c-0b7c6eb77850 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "b9669bb8-680f-492a-a7c6-82e6edb0a8ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.019s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.715895] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4491d85-a651-4ec0-8880-9660a4b3535b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.726245] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0c424c73-04a1-43a4-a41b-3ecd3f21d0a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.749791] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4515625c-ecd4-4a2c-a30b-2d70fd5b653c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.773028] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] The volume has not been displaced from its original location: [datastore2] volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781/volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781.vmdk. No consolidation needed. {{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 924.776794] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Reconfiguring VM instance instance-00000034 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 924.777327] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-516d4ea6-ba5f-4ff1-8f17-598f58ce68d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.805135] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451939, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.809340] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for the task: (returnval){ [ 924.809340] env[62383]: value = "task-2451941" [ 924.809340] env[62383]: _type = "Task" [ 924.809340] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.809610] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451940, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.820427] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451941, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.938829] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.148647] env[62383]: DEBUG nova.scheduler.client.report [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 925.163050] env[62383]: DEBUG nova.compute.manager [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 925.163050] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.163050] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf1acb4-e296-4566-8c88-bb620ed3346a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.169018] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.169290] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-438792a9-41b5-470f-b1c3-8883772ecc7a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.229863] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 925.230145] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Deleting contents of the VM from datastore datastore2 {{(pid=62383) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 925.230361] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleting the datastore file [datastore2] 23d24da6-c7d8-4d6a-8442-a1066505aab1 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.230655] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d7685fa-a42b-4199-9c7a-b1ee9898cc7d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.237783] env[62383]: DEBUG oslo_vmware.api [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 925.237783] env[62383]: value = "task-2451943" [ 925.237783] env[62383]: _type = "Task" [ 925.237783] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.246259] env[62383]: DEBUG oslo_vmware.api [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451943, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.287123] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451940, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567614} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.287240] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] c117e858-696f-43dc-9182-70380214737f/c117e858-696f-43dc-9182-70380214737f.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 925.287540] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.287927] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29e17623-f982-4e17-b7ea-9b20b526a35c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.300107] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 925.300107] env[62383]: value = "task-2451944" [ 925.300107] env[62383]: _type = "Task" [ 925.300107] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.312128] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451939, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598643} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.317418] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] eb632e2d-b71e-446d-83a2-0bab1d823d27/eb632e2d-b71e-446d-83a2-0bab1d823d27.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 925.317662] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.321197] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d672b9c6-cdfd-448f-b75b-f386c0e7b952 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.327062] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451944, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.329036] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451941, 'name': ReconfigVM_Task, 'duration_secs': 0.183212} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.330384] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Reconfigured VM instance instance-00000034 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 925.335814] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 925.335814] env[62383]: value = "task-2451945" [ 925.335814] env[62383]: _type = "Task" [ 925.335814] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.336129] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc735dfd-8448-4332-8d70-5ad18575b10a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.360355] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451945, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.360504] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for the task: (returnval){ [ 925.360504] env[62383]: value = "task-2451946" [ 925.360504] env[62383]: _type = "Task" [ 925.360504] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.371118] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451946, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.446259] env[62383]: DEBUG nova.compute.manager [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 925.447258] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdd0161-65aa-4dad-a0a7-dc64aa28488f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.659234] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.520s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.659234] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.474s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.659326] env[62383]: DEBUG nova.objects.instance [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lazy-loading 'resources' on Instance uuid 188d6b20-3dca-4c1c-8271-1871d2c992d5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.685397] env[62383]: INFO nova.scheduler.client.report [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b 
tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Deleted allocations for instance 0f6b7094-27a0-4e97-98ac-bff857124b6c [ 925.749104] env[62383]: DEBUG oslo_vmware.api [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2451943, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149927} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.749373] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.749560] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 925.749736] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.749955] env[62383]: INFO nova.compute.manager [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Took 0.59 seconds to destroy the instance on the hypervisor. [ 925.750213] env[62383]: DEBUG oslo.service.loopingcall [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 925.751046] env[62383]: DEBUG nova.compute.manager [-] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 925.751226] env[62383]: DEBUG nova.network.neutron [-] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 925.811910] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451944, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100669} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.816491] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.817506] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43afbaa5-65fd-4ec3-89a7-a507927d0545 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.858014] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] c117e858-696f-43dc-9182-70380214737f/c117e858-696f-43dc-9182-70380214737f.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.858014] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2956dc7-0050-4765-a2b9-696cd4eeefd9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.884252] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 925.884252] env[62383]: value = "task-2451947" [ 925.884252] env[62383]: _type = "Task" [ 925.884252] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.892914] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.174354} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.895187] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451946, 'name': ReconfigVM_Task, 'duration_secs': 0.253126} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.899305] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 925.900876] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496409', 'volume_id': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'name': 'volume-884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd0311c29-e1ed-446f-a52b-1687b9561740', 'attached_at': '', 'detached_at': '', 'volume_id': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781', 'serial': '884f4ed9-0cfe-43bd-8c26-6c9365c1b781'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 925.901462] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.903630] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286f2788-18aa-4091-9343-05b34d177fbf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.907402] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a86b1d-3bc7-4396-aa0c-8a86844bfe79 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.915270] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "4136466e-d9c6-448a-b392-415bb7c44a8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.915562] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4136466e-d9c6-448a-b392-415bb7c44a8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.921348] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451947, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.950677] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] eb632e2d-b71e-446d-83a2-0bab1d823d27/eb632e2d-b71e-446d-83a2-0bab1d823d27.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 925.953784] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adab7899-3afd-4bbc-ac3b-92cbe4ddb2bd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.969741] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.973184] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-905a47bd-22e2-4928-beec-cdba3f6eec7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.981011] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 925.981011] env[62383]: value = "task-2451949" [ 925.981011] env[62383]: _type = "Task" [ 925.981011] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.989998] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451949, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.043398] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 926.043669] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 926.043854] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Deleting the datastore file [datastore2] d0311c29-e1ed-446f-a52b-1687b9561740 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 926.044312] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-436d8f31-0f2f-4bbf-b74d-f1390b5977ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.051023] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for the task: (returnval){ [ 926.051023] env[62383]: value = "task-2451950" [ 926.051023] env[62383]: _type = "Task" [ 926.051023] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.060927] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451950, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.156280] env[62383]: DEBUG nova.compute.manager [req-9dbf688c-733c-4f9e-a7bf-f334b45cd737 req-7a7baae6-a056-44ec-a079-6d5413642aed service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Received event network-vif-deleted-219a19a2-eb69-4683-81ac-a79596cb28f3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 926.156577] env[62383]: INFO nova.compute.manager [req-9dbf688c-733c-4f9e-a7bf-f334b45cd737 req-7a7baae6-a056-44ec-a079-6d5413642aed service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Neutron deleted interface 219a19a2-eb69-4683-81ac-a79596cb28f3; detaching it from the instance and deleting it from the info cache [ 926.157640] env[62383]: DEBUG nova.network.neutron [req-9dbf688c-733c-4f9e-a7bf-f334b45cd737 req-7a7baae6-a056-44ec-a079-6d5413642aed service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.195743] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02a6f671-69d1-4f14-b83e-ca60753cd76b tempest-ImagesOneServerNegativeTestJSON-662818952 tempest-ImagesOneServerNegativeTestJSON-662818952-project-member] Lock "0f6b7094-27a0-4e97-98ac-bff857124b6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.090s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.406766] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451947, 'name': ReconfigVM_Task, 'duration_secs': 0.483968} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.407101] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Reconfigured VM instance instance-00000050 to attach disk [datastore1] c117e858-696f-43dc-9182-70380214737f/c117e858-696f-43dc-9182-70380214737f.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.407744] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8445dd0-dad0-47f9-88ac-4fcdaf4495cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.419644] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 926.419644] env[62383]: value = "task-2451951" [ 926.419644] env[62383]: _type = "Task" [ 926.419644] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.428700] env[62383]: DEBUG nova.compute.manager [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 926.432041] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451951, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.480325] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fb4e38-e6f4-4fd2-95b5-f9acee7c2e13 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.508326] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Doing hard reboot of VM {{(pid=62383) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 926.508640] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451949, 'name': ReconfigVM_Task, 'duration_secs': 0.2919} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.513016] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-df4d1ff7-53af-4961-bd56-7bfd1e515701 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.513652] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Reconfigured VM instance instance-00000051 to attach disk [datastore2] eb632e2d-b71e-446d-83a2-0bab1d823d27/eb632e2d-b71e-446d-83a2-0bab1d823d27.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 926.515794] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-485bc461-6b98-49de-8c04-c95cb9a50d68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.522868] env[62383]: DEBUG oslo_vmware.api [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 926.522868] env[62383]: value = "task-2451952" [ 926.522868] env[62383]: _type = "Task" [ 926.522868] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.524530] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 926.524530] env[62383]: value = "task-2451953" [ 926.524530] env[62383]: _type = "Task" [ 926.524530] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.542788] env[62383]: DEBUG oslo_vmware.api [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451952, 'name': ResetVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.548272] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451953, 'name': Rename_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.562664] env[62383]: DEBUG oslo_vmware.api [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Task: {'id': task-2451950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082933} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.562938] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.563140] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.563374] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.563493] env[62383]: INFO nova.compute.manager [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Took 2.44 seconds to destroy the instance on the hypervisor. [ 926.563728] env[62383]: DEBUG oslo.service.loopingcall [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 926.563923] env[62383]: DEBUG nova.compute.manager [-] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 926.564040] env[62383]: DEBUG nova.network.neutron [-] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.633532] env[62383]: DEBUG nova.network.neutron [-] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.635386] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9c73b7-a2a9-4cd6-82ea-d9ac0f4a9c9e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.646244] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68a7ea9-703a-45c4-9154-8edbe1d6d947 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.683476] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e95eee43-9717-4fcd-a88e-4cc963fc906e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.686055] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c27f27-4159-448d-b268-209858f60ce6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.691843] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.692144] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.703014] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "362da311-fa2b-435d-b972-155a3ac22cbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.703253] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "362da311-fa2b-435d-b972-155a3ac22cbb" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.708563] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85cc125e-bbb7-467b-80dd-80aad6238f36 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.723384] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7805e7ac-600d-425e-aea8-e5a5e123d193 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.743608] env[62383]: DEBUG nova.compute.provider_tree [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.759018] env[62383]: DEBUG nova.compute.manager [req-9dbf688c-733c-4f9e-a7bf-f334b45cd737 req-7a7baae6-a056-44ec-a079-6d5413642aed service nova] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Detach interface failed, port_id=219a19a2-eb69-4683-81ac-a79596cb28f3, reason: Instance 23d24da6-c7d8-4d6a-8442-a1066505aab1 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 926.927933] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451951, 'name': Rename_Task, 'duration_secs': 0.166753} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.928244] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.928487] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6778906e-3828-47ef-8e6d-99720fc0fde6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.936408] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 926.936408] env[62383]: value = "task-2451954" [ 926.936408] env[62383]: _type = "Task" [ 926.936408] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.948978] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451954, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.965014] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.034926] env[62383]: DEBUG oslo_vmware.api [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2451952, 'name': ResetVM_Task, 'duration_secs': 0.114673} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.035842] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Did hard reboot of VM {{(pid=62383) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 927.035842] env[62383]: DEBUG nova.compute.manager [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 927.036553] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5131b9fd-c27a-4ca7-8bde-0636577a5387 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.042362] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451953, 'name': Rename_Task, 'duration_secs': 0.168618} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.043045] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.043338] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b78d0dd-0b54-410e-b83a-ed16a6a23798 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.052742] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 927.052742] env[62383]: value = "task-2451955" [ 927.052742] env[62383]: _type = "Task" [ 927.052742] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.060257] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451955, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.142257] env[62383]: INFO nova.compute.manager [-] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Took 1.39 seconds to deallocate network for instance. [ 927.196247] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 927.205728] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 927.247067] env[62383]: DEBUG nova.scheduler.client.report [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 927.449364] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451954, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.558598] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7406c2a5-8a2f-40a8-b507-fdb72429f54f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.288s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.567036] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451955, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.651332] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.719512] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.730342] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.754523] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.096s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.757026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.703s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.760148] env[62383]: INFO nova.compute.claims [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.776463] env[62383]: INFO nova.scheduler.client.report [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Deleted allocations for instance 188d6b20-3dca-4c1c-8271-1871d2c992d5 [ 927.946510] env[62383]: DEBUG oslo_vmware.api [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451954, 'name': PowerOnVM_Task, 'duration_secs': 0.708982} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.946783] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 927.947046] env[62383]: INFO nova.compute.manager [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Took 14.79 seconds to spawn the instance on the hypervisor. [ 927.947244] env[62383]: DEBUG nova.compute.manager [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 927.948009] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c9d610-018f-4bfe-8a4e-1770b6813c04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.007732] env[62383]: DEBUG nova.network.neutron [-] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.063720] env[62383]: DEBUG oslo_vmware.api [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2451955, 'name': PowerOnVM_Task, 'duration_secs': 0.780599} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.063967] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 928.064421] env[62383]: INFO nova.compute.manager [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Took 8.30 seconds to spawn the instance on the hypervisor. 
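The records above show the oslo.vmware task pattern end to end: a vSphere *_Task method is invoked (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task), wait_for_task polls it (the "progress is N%" lines emitted from _poll_task), and completion is reported together with duration_secs. A minimal sketch of that pattern, assuming a reachable vCenter; the hostname, credentials, and vm_ref below are placeholders and do not come from this log:

    from oslo_vmware import api

    # Placeholder connection details; in Nova these come from the [vmware]
    # section of nova.conf, not from values shown in this log.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Stand-in: real code obtains the VirtualMachine managed-object reference
    # via a PropertyCollector/SearchIndex query, as the RetrievePropertiesEx
    # and FindAllByUuid calls above illustrate.
    vm_ref = None

    # Invoke a vSphere task method, then block while wait_for_task polls it,
    # logging progress like the _poll_task lines above and raising if the
    # task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)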
[ 928.064621] env[62383]: DEBUG nova.compute.manager [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.066023] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ec10ed-b4a1-404f-b517-b2c77badd4c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.182471] env[62383]: DEBUG nova.compute.manager [req-b6f650eb-d19d-4444-9aff-33648774d910 req-4f8a110b-f327-4e55-84d9-ac653c668f76 service nova] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Received event network-vif-deleted-9a97ba80-acdf-4ecf-a553-e26b0d98c82f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 928.283861] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc5a947d-a77b-4d70-a3a6-fb102335e837 tempest-ServerAddressesNegativeTestJSON-436725143 tempest-ServerAddressesNegativeTestJSON-436725143-project-member] Lock "188d6b20-3dca-4c1c-8271-1871d2c992d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.120s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.469434] env[62383]: INFO nova.compute.manager [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Took 43.10 seconds to build instance. [ 928.511640] env[62383]: INFO nova.compute.manager [-] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Took 1.95 seconds to deallocate network for instance. [ 928.585263] env[62383]: INFO nova.compute.manager [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Took 33.58 seconds to build instance. [ 928.972123] env[62383]: DEBUG oslo_concurrency.lockutils [None req-43b880b1-1072-4724-ac97-1bd8f8bc3cc1 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "c117e858-696f-43dc-9182-70380214737f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.619s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.061535] env[62383]: INFO nova.compute.manager [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Took 0.55 seconds to detach 1 volumes for instance. 
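The interleaved "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" messages come from oslo.concurrency's lockutils, which serializes work on shared state such as the "compute_resources" lock seen throughout this log. A hedged illustration of that usage follows; the function name is invented for the example and is not Nova code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources_example():
        # Only one thread runs this body at a time for the named lock; the
        # "waited"/"held" figures in the log measure time spent queueing for
        # the lock and time spent inside the critical section.
        pass

    # The same lock can also be taken explicitly as a context manager:
    #   with lockutils.lock('compute_resources'):
    #       ...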
[ 929.063657] env[62383]: DEBUG nova.compute.manager [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Deleting volume: 884f4ed9-0cfe-43bd-8c26-6c9365c1b781 {{(pid=62383) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 929.091181] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4ecf88d2-0227-4da0-ad57-88f250c69d14 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "eb632e2d-b71e-446d-83a2-0bab1d823d27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.101s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.154883] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e1f80c-4f4b-4ab8-a00e-f9cd99151219 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.161830] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c3c3c2-4c90-4283-ab67-43fd6b0f5598 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.200326] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6075304f-222b-4eae-abf2-8d397e6232b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.208788] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d826bf72-da8b-4d3c-8704-747b1166f95a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.223324] env[62383]: DEBUG nova.compute.provider_tree [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.268160] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "c117e858-696f-43dc-9182-70380214737f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.268828] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "c117e858-696f-43dc-9182-70380214737f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.269422] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "c117e858-696f-43dc-9182-70380214737f-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.269422] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "c117e858-696f-43dc-9182-70380214737f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.269422] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "c117e858-696f-43dc-9182-70380214737f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.272780] env[62383]: INFO nova.compute.manager [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Terminating instance [ 929.624282] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.726263] env[62383]: DEBUG nova.scheduler.client.report [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 929.781665] env[62383]: DEBUG nova.compute.manager [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 929.783913] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 929.786843] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e60c7b-d8c5-4367-80a3-75aef5a79f4b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.795467] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 929.795723] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f5a4d02-17b5-4872-9266-305f19f39465 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.804016] env[62383]: DEBUG oslo_vmware.api [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 929.804016] env[62383]: value = "task-2451957" [ 929.804016] env[62383]: _type = "Task" [ 929.804016] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.816143] env[62383]: DEBUG oslo_vmware.api [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451957, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.232219] env[62383]: DEBUG nova.compute.manager [req-b5bb2800-4ea9-404c-ada0-5e48c7269fd3 req-be2ae4cb-cf71-45df-945d-572338874784 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Received event network-changed-b34b897a-3f37-4846-a7e9-0c248d1ecaf9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 930.232674] env[62383]: DEBUG nova.compute.manager [req-b5bb2800-4ea9-404c-ada0-5e48c7269fd3 req-be2ae4cb-cf71-45df-945d-572338874784 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Refreshing instance network info cache due to event network-changed-b34b897a-3f37-4846-a7e9-0c248d1ecaf9. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 930.233110] env[62383]: DEBUG oslo_concurrency.lockutils [req-b5bb2800-4ea9-404c-ada0-5e48c7269fd3 req-be2ae4cb-cf71-45df-945d-572338874784 service nova] Acquiring lock "refresh_cache-eb632e2d-b71e-446d-83a2-0bab1d823d27" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.233448] env[62383]: DEBUG oslo_concurrency.lockutils [req-b5bb2800-4ea9-404c-ada0-5e48c7269fd3 req-be2ae4cb-cf71-45df-945d-572338874784 service nova] Acquired lock "refresh_cache-eb632e2d-b71e-446d-83a2-0bab1d823d27" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.234238] env[62383]: DEBUG nova.network.neutron [req-b5bb2800-4ea9-404c-ada0-5e48c7269fd3 req-be2ae4cb-cf71-45df-945d-572338874784 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Refreshing network info cache for port b34b897a-3f37-4846-a7e9-0c248d1ecaf9 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 930.236942] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.237825] env[62383]: DEBUG nova.compute.manager [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 930.242593] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 14.990s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.242979] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.243325] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 930.243808] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.239s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.246550] env[62383]: INFO nova.compute.claims [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 930.253815] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-906babc9-f1d2-4049-b7b3-58da620f3e04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.271019] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd478a1-ec88-4508-8c29-b3c0163cf626 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.289708] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c065039-2b67-4d49-88a6-6455793af6c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.300327] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e4fb62-ec92-459b-a833-e03b151910a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.334361] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178358MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 930.334361] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.340069] env[62383]: DEBUG oslo_vmware.api [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451957, 'name': PowerOffVM_Task, 'duration_secs': 0.360205} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.340269] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 930.340432] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 930.340693] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04883ad2-dc54-4829-9cda-caa1441a5b21 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.483485] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 930.485435] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 930.486677] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Deleting the datastore file [datastore1] c117e858-696f-43dc-9182-70380214737f {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 930.486677] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8fd095c-57af-4182-87a3-193cd8d58d7d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.496022] env[62383]: DEBUG oslo_vmware.api [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 930.496022] env[62383]: value = "task-2451959" [ 930.496022] env[62383]: _type = "Task" [ 930.496022] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.505211] env[62383]: DEBUG oslo_vmware.api [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451959, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.688433] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "1e367665-1d4b-4686-ac79-c946423c1762" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.688433] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.759022] env[62383]: DEBUG nova.compute.utils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 930.759022] env[62383]: DEBUG nova.compute.manager [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 930.759022] env[62383]: DEBUG nova.network.neutron [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 930.818357] env[62383]: DEBUG nova.policy [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec3589360ad54088ad4151a82febcb99', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9f2dba3783e48968554ca75be01cd5c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 931.004738] env[62383]: DEBUG oslo_vmware.api [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2451959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242323} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.043838] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.043838] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 931.043838] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.043838] env[62383]: INFO nova.compute.manager [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: c117e858-696f-43dc-9182-70380214737f] Took 1.22 seconds to destroy the instance on the hypervisor. [ 931.043838] env[62383]: DEBUG oslo.service.loopingcall [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 931.043838] env[62383]: DEBUG nova.compute.manager [-] [instance: c117e858-696f-43dc-9182-70380214737f] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 931.043838] env[62383]: DEBUG nova.network.neutron [-] [instance: c117e858-696f-43dc-9182-70380214737f] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 931.272216] env[62383]: INFO nova.compute.manager [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Detaching volume 34440909-fbf8-4f00-9d5a-dd07de7bcefa [ 931.272216] env[62383]: DEBUG nova.network.neutron [req-b5bb2800-4ea9-404c-ada0-5e48c7269fd3 req-be2ae4cb-cf71-45df-945d-572338874784 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Updated VIF entry in instance network info cache for port b34b897a-3f37-4846-a7e9-0c248d1ecaf9. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 931.272216] env[62383]: DEBUG nova.network.neutron [req-b5bb2800-4ea9-404c-ada0-5e48c7269fd3 req-be2ae4cb-cf71-45df-945d-572338874784 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Updating instance_info_cache with network_info: [{"id": "b34b897a-3f37-4846-a7e9-0c248d1ecaf9", "address": "fa:16:3e:93:f9:c4", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb34b897a-3f", "ovs_interfaceid": "b34b897a-3f37-4846-a7e9-0c248d1ecaf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.272216] env[62383]: INFO nova.virt.block_device [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Attempting to driver detach volume 34440909-fbf8-4f00-9d5a-dd07de7bcefa from mountpoint /dev/sdb [ 931.272216] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Volume detach. 
Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 931.272216] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496502', 'volume_id': '34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'name': 'volume-34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1e367665-1d4b-4686-ac79-c946423c1762', 'attached_at': '', 'detached_at': '', 'volume_id': '34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'serial': '34440909-fbf8-4f00-9d5a-dd07de7bcefa'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 931.272216] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250d8543-8e34-46c0-bb87-9301739d925c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.272216] env[62383]: DEBUG nova.compute.manager [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 931.272216] env[62383]: DEBUG nova.network.neutron [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Successfully created port: f5edd1b5-561e-477f-99c4-66985de210cd {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.272216] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9c4475-78ee-470e-8c27-28d16d62f4fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.287474] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b54bd6-3b27-4b4c-adbe-e85bf87d8763 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.306712] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163aef08-9659-4fd6-85c3-94d06e7241c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.325375] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] The volume has not been displaced from its original location: [datastore2] volume-34440909-fbf8-4f00-9d5a-dd07de7bcefa/volume-34440909-fbf8-4f00-9d5a-dd07de7bcefa.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 931.331335] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Reconfiguring VM instance instance-00000033 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 931.334918] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-227806e7-2e1a-4fed-a148-27c1848d211a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.356177] env[62383]: DEBUG oslo_vmware.api [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 931.356177] env[62383]: value = "task-2451960" [ 931.356177] env[62383]: _type = "Task" [ 931.356177] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.365983] env[62383]: DEBUG oslo_vmware.api [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451960, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.505055] env[62383]: DEBUG nova.compute.manager [req-3beac7e3-86f7-40c0-b8d0-548cd4011575 req-f5dc0556-7f14-4e4e-987c-702bb6b22268 service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received event network-vif-deleted-8f856cf6-4638-4fe2-8094-e2856f5362aa {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 931.505785] env[62383]: INFO nova.compute.manager [req-3beac7e3-86f7-40c0-b8d0-548cd4011575 req-f5dc0556-7f14-4e4e-987c-702bb6b22268 service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Neutron deleted interface 8f856cf6-4638-4fe2-8094-e2856f5362aa; detaching it from the instance and deleting it from the info cache [ 931.505785] env[62383]: DEBUG nova.network.neutron [req-3beac7e3-86f7-40c0-b8d0-548cd4011575 req-f5dc0556-7f14-4e4e-987c-702bb6b22268 service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Updating instance_info_cache with network_info: [{"id": "88efd462-2836-4b8e-9deb-20be5e6eff71", "address": "fa:16:3e:c4:11:77", "network": {"id": "77f03e6f-d6e8-40ae-bc84-b24afda39ec7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1432825120", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap88efd462-28", "ovs_interfaceid": 
"88efd462-2836-4b8e-9deb-20be5e6eff71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "30736aa7-603a-46e9-8757-f52213094f87", "address": "fa:16:3e:4e:af:ee", "network": {"id": "77f03e6f-d6e8-40ae-bc84-b24afda39ec7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1432825120", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30736aa7-60", "ovs_interfaceid": "30736aa7-603a-46e9-8757-f52213094f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.711208] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d39b2d-ab05-46ec-8b89-3755ca80abd7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.712355] env[62383]: DEBUG oslo_concurrency.lockutils [req-b5bb2800-4ea9-404c-ada0-5e48c7269fd3 req-be2ae4cb-cf71-45df-945d-572338874784 service nova] Releasing lock "refresh_cache-eb632e2d-b71e-446d-83a2-0bab1d823d27" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.717115] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ade48a-6d0a-41f5-b454-bfb0c01063e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.750699] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54343ba6-cb57-4697-b4d5-3077aa57cb86 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.760148] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115874fd-e1f1-4d5f-9fad-7e39e03c63f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.780361] env[62383]: DEBUG nova.compute.provider_tree [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.834597] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "4e5bd3ee-605f-4770-b658-9cbc3d0010ab" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.834982] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "4e5bd3ee-605f-4770-b658-9cbc3d0010ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.866365] env[62383]: DEBUG oslo_vmware.api [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451960, 'name': ReconfigVM_Task, 'duration_secs': 0.274009} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.866952] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Reconfigured VM instance instance-00000033 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 931.872056] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c76a5a8-9f97-45d0-9cd6-7187c63d075b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.887390] env[62383]: DEBUG oslo_vmware.api [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 931.887390] env[62383]: value = "task-2451961" [ 931.887390] env[62383]: _type = "Task" [ 931.887390] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.895783] env[62383]: DEBUG oslo_vmware.api [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451961, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.014018] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e98ea57d-7579-4eb7-91b7-e93197e8e009 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.021582] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7d943d-501c-4a27-879b-d3004051cb0b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.060857] env[62383]: DEBUG nova.compute.manager [req-3beac7e3-86f7-40c0-b8d0-548cd4011575 req-f5dc0556-7f14-4e4e-987c-702bb6b22268 service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Detach interface failed, port_id=8f856cf6-4638-4fe2-8094-e2856f5362aa, reason: Instance c117e858-696f-43dc-9182-70380214737f could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 932.287801] env[62383]: DEBUG nova.compute.manager [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 932.293044] env[62383]: DEBUG nova.scheduler.client.report [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.335270] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 932.335570] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.335686] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 932.335867] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.336164] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 932.336404] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 932.336675] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 932.336989] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 932.337254] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 932.337479] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 932.337712] env[62383]: DEBUG nova.virt.hardware [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 932.338152] env[62383]: DEBUG nova.compute.manager [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 932.341598] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c55e8d-b76d-4443-bf93-7cb4f80e308d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.354803] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89932a6-0220-4976-aca2-68bd68a50092 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.401384] env[62383]: DEBUG oslo_vmware.api [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451961, 'name': ReconfigVM_Task, 'duration_secs': 0.171951} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.401696] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496502', 'volume_id': '34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'name': 'volume-34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '1e367665-1d4b-4686-ac79-c946423c1762', 'attached_at': '', 'detached_at': '', 'volume_id': '34440909-fbf8-4f00-9d5a-dd07de7bcefa', 'serial': '34440909-fbf8-4f00-9d5a-dd07de7bcefa'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 932.604843] env[62383]: DEBUG nova.network.neutron [-] [instance: c117e858-696f-43dc-9182-70380214737f] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.801501] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.558s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.803429] env[62383]: DEBUG nova.compute.manager [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 932.804673] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.783s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.806134] env[62383]: INFO nova.compute.claims [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 932.880666] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.975032] env[62383]: DEBUG nova.objects.instance [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lazy-loading 'flavor' on Instance uuid 1e367665-1d4b-4686-ac79-c946423c1762 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.989464] env[62383]: DEBUG nova.network.neutron [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Successfully updated port: f5edd1b5-561e-477f-99c4-66985de210cd {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 933.109910] env[62383]: INFO nova.compute.manager [-] [instance: c117e858-696f-43dc-9182-70380214737f] Took 2.10 seconds to deallocate network for instance. [ 933.316542] env[62383]: DEBUG nova.compute.utils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 933.318097] env[62383]: DEBUG nova.compute.manager [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 933.318440] env[62383]: DEBUG nova.network.neutron [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 933.369305] env[62383]: DEBUG nova.policy [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd57fa242b63d41aabf6f20dc8ee9e520', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '836aef2835c34521a4fe9a9446deb809', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 933.494541] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "refresh_cache-df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.494714] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "refresh_cache-df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.497965] env[62383]: DEBUG nova.network.neutron [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.545822] env[62383]: DEBUG nova.compute.manager [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received event network-vif-deleted-88efd462-2836-4b8e-9deb-20be5e6eff71 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 933.546053] env[62383]: DEBUG nova.compute.manager [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] [instance: c117e858-696f-43dc-9182-70380214737f] Received event network-vif-deleted-30736aa7-603a-46e9-8757-f52213094f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 933.546234] env[62383]: DEBUG nova.compute.manager [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Received event network-vif-plugged-f5edd1b5-561e-477f-99c4-66985de210cd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 933.546590] env[62383]: DEBUG oslo_concurrency.lockutils [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 
req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] Acquiring lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.546673] env[62383]: DEBUG oslo_concurrency.lockutils [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] Lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.546772] env[62383]: DEBUG oslo_concurrency.lockutils [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] Lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.546971] env[62383]: DEBUG nova.compute.manager [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] No waiting events found dispatching network-vif-plugged-f5edd1b5-561e-477f-99c4-66985de210cd {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 933.547181] env[62383]: WARNING nova.compute.manager [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Received unexpected event network-vif-plugged-f5edd1b5-561e-477f-99c4-66985de210cd for instance with vm_state building and task_state spawning. [ 933.547343] env[62383]: DEBUG nova.compute.manager [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Received event network-changed-f5edd1b5-561e-477f-99c4-66985de210cd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 933.547486] env[62383]: DEBUG nova.compute.manager [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Refreshing instance network info cache due to event network-changed-f5edd1b5-561e-477f-99c4-66985de210cd. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 933.547653] env[62383]: DEBUG oslo_concurrency.lockutils [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] Acquiring lock "refresh_cache-df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.617591] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.689573] env[62383]: DEBUG nova.network.neutron [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Successfully created port: 8e058335-a5bd-4e58-ab6b-9d03b28c03c3 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.825186] env[62383]: DEBUG nova.compute.manager [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 933.899316] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquiring lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.899549] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.988662] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6be6b908-2dca-4910-bbea-80fd31b70718 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.301s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.025332] env[62383]: DEBUG nova.network.neutron [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.158775] env[62383]: DEBUG nova.network.neutron [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Updating instance_info_cache with network_info: [{"id": "f5edd1b5-561e-477f-99c4-66985de210cd", "address": "fa:16:3e:9d:e0:b4", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5edd1b5-56", "ovs_interfaceid": "f5edd1b5-561e-477f-99c4-66985de210cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.201121] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffc3bb8-0fdc-45bf-a0ab-c8ca411ab8ea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.209162] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27880edb-2d4e-4db2-bcc4-b272d125619e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.239096] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232e58d1-dd2b-4e5c-b707-fafe61fb529c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.246098] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe48b080-93b4-4e4b-a8f6-d0d66dfb2ce8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.258957] env[62383]: DEBUG nova.compute.provider_tree [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.401826] env[62383]: DEBUG nova.compute.manager [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 934.661014] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "refresh_cache-df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.661386] env[62383]: DEBUG nova.compute.manager [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Instance network_info: |[{"id": "f5edd1b5-561e-477f-99c4-66985de210cd", "address": "fa:16:3e:9d:e0:b4", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5edd1b5-56", "ovs_interfaceid": "f5edd1b5-561e-477f-99c4-66985de210cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 934.661701] env[62383]: DEBUG oslo_concurrency.lockutils [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] Acquired lock "refresh_cache-df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.661881] env[62383]: DEBUG nova.network.neutron [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Refreshing network info cache for port f5edd1b5-561e-477f-99c4-66985de210cd {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 934.664477] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:e0:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b36c5ae6-c344-4bd1-8239-29128e2bbfbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5edd1b5-561e-477f-99c4-66985de210cd', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 934.674225] env[62383]: DEBUG oslo.service.loopingcall [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 
tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 934.676105] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 934.678635] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf3b247f-3b43-4d7f-982c-0caf6900fc6a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.708393] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 934.708393] env[62383]: value = "task-2451962" [ 934.708393] env[62383]: _type = "Task" [ 934.708393] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.718849] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451962, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.762350] env[62383]: DEBUG nova.scheduler.client.report [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 934.835313] env[62383]: DEBUG nova.compute.manager [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 934.868908] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 934.869181] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.869342] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 934.869521] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.869667] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 934.869921] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 934.870051] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 934.870305] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 934.870355] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 934.870486] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 934.871072] env[62383]: DEBUG nova.virt.hardware [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 934.871530] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbb3b4a-0ee6-48b5-9e4c-54447b9fc076 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.879649] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8931c0-dac8-4c4b-b399-e753e12257d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.933825] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.205204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "1e367665-1d4b-4686-ac79-c946423c1762" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.205204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.205204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "1e367665-1d4b-4686-ac79-c946423c1762-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.205204] env[62383]: DEBUG 
oslo_concurrency.lockutils [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.205204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.207019] env[62383]: INFO nova.compute.manager [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Terminating instance [ 935.228237] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451962, 'name': CreateVM_Task, 'duration_secs': 0.352867} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.229237] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 935.229906] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.230076] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.230394] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 935.234023] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d92b6e5-d655-417a-be86-8c8b3cf5245c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.235544] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 935.235544] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52092512-a544-d548-780d-ee6e1ddd8f28" [ 935.235544] 
env[62383]: _type = "Task" [ 935.235544] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.244266] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52092512-a544-d548-780d-ee6e1ddd8f28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.267829] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.268305] env[62383]: DEBUG nova.compute.manager [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 935.273190] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.002s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.273190] env[62383]: INFO nova.compute.claims [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.276332] env[62383]: DEBUG nova.network.neutron [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Successfully updated port: 8e058335-a5bd-4e58-ab6b-9d03b28c03c3 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 935.412134] env[62383]: DEBUG nova.network.neutron [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Updated VIF entry in instance network info cache for port f5edd1b5-561e-477f-99c4-66985de210cd. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 935.412597] env[62383]: DEBUG nova.network.neutron [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Updating instance_info_cache with network_info: [{"id": "f5edd1b5-561e-477f-99c4-66985de210cd", "address": "fa:16:3e:9d:e0:b4", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5edd1b5-56", "ovs_interfaceid": "f5edd1b5-561e-477f-99c4-66985de210cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.577286] env[62383]: DEBUG nova.compute.manager [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Received event network-vif-plugged-8e058335-a5bd-4e58-ab6b-9d03b28c03c3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 935.577286] env[62383]: DEBUG oslo_concurrency.lockutils [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] Acquiring lock "f193af26-eba8-471f-a00e-0afa9b190d0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.577367] env[62383]: DEBUG oslo_concurrency.lockutils [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] Lock "f193af26-eba8-471f-a00e-0afa9b190d0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.577507] env[62383]: DEBUG oslo_concurrency.lockutils [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] Lock "f193af26-eba8-471f-a00e-0afa9b190d0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.577659] env[62383]: DEBUG nova.compute.manager [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] No waiting events found dispatching network-vif-plugged-8e058335-a5bd-4e58-ab6b-9d03b28c03c3 {{(pid=62383) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 935.577826] env[62383]: WARNING nova.compute.manager [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Received unexpected event network-vif-plugged-8e058335-a5bd-4e58-ab6b-9d03b28c03c3 for instance with vm_state building and task_state spawning. [ 935.577988] env[62383]: DEBUG nova.compute.manager [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Received event network-changed-8e058335-a5bd-4e58-ab6b-9d03b28c03c3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 935.578166] env[62383]: DEBUG nova.compute.manager [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Refreshing instance network info cache due to event network-changed-8e058335-a5bd-4e58-ab6b-9d03b28c03c3. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 935.578349] env[62383]: DEBUG oslo_concurrency.lockutils [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] Acquiring lock "refresh_cache-f193af26-eba8-471f-a00e-0afa9b190d0b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.578484] env[62383]: DEBUG oslo_concurrency.lockutils [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] Acquired lock "refresh_cache-f193af26-eba8-471f-a00e-0afa9b190d0b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.578641] env[62383]: DEBUG nova.network.neutron [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Refreshing network info cache for port 8e058335-a5bd-4e58-ab6b-9d03b28c03c3 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 935.718883] env[62383]: DEBUG nova.compute.manager [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 935.719165] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.720502] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3116416-e975-4066-a65d-7593b5a2427b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.728551] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.728840] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-828b598d-91cb-43d1-a9a3-95e73e5e927d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.734706] env[62383]: DEBUG oslo_vmware.api [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 935.734706] env[62383]: value = "task-2451963" [ 935.734706] env[62383]: _type = "Task" [ 935.734706] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.747022] env[62383]: DEBUG oslo_vmware.api [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451963, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.751254] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52092512-a544-d548-780d-ee6e1ddd8f28, 'name': SearchDatastore_Task, 'duration_secs': 0.010044} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.751648] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.751956] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 935.752495] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.752495] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.752611] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 935.752876] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53d3518a-0cba-4bca-84fe-03b0678f1991 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.761229] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 935.761477] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 935.762297] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16a28c62-c703-4044-a6d9-e7d96cf40e23 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.767690] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 935.767690] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a855a8-4db4-925e-edec-c3e9d5ff3eb6" [ 935.767690] env[62383]: _type = "Task" [ 935.767690] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.775465] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a855a8-4db4-925e-edec-c3e9d5ff3eb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.779484] env[62383]: DEBUG nova.compute.utils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 935.782545] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquiring lock "refresh_cache-f193af26-eba8-471f-a00e-0afa9b190d0b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.782813] env[62383]: DEBUG nova.compute.manager [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 935.782991] env[62383]: DEBUG nova.network.neutron [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 935.839418] env[62383]: DEBUG nova.policy [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db7e9998210e485fa855f0375f63ad55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35016a724e7e4fa2b0fc19396d8e736b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 935.915416] env[62383]: DEBUG oslo_concurrency.lockutils [req-5d784c72-c319-48a9-9e0e-76c2dcf4ea42 req-89b0e9cc-8d27-463a-8fb7-4636e5146efc service nova] Releasing lock "refresh_cache-df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.123461] env[62383]: DEBUG nova.network.neutron [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.140364] env[62383]: DEBUG nova.network.neutron [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Successfully created port: d1287bdf-752f-4429-ad35-e47c6ef4804a {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.245194] env[62383]: DEBUG oslo_vmware.api [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451963, 'name': PowerOffVM_Task, 'duration_secs': 0.247275} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.245501] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.245694] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.245958] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62f4a4dd-4c64-4c63-aa09-9aa02b603b9c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.250557] env[62383]: DEBUG nova.network.neutron [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.277899] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a855a8-4db4-925e-edec-c3e9d5ff3eb6, 'name': SearchDatastore_Task, 'duration_secs': 0.011801} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.278689] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5f37049-e692-4834-b404-52ec7c2c697d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.284098] env[62383]: DEBUG nova.compute.manager [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 936.286619] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 936.286619] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52fe8ef2-687d-368f-c609-e5b2529d8d72" [ 936.286619] env[62383]: _type = "Task" [ 936.286619] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.297770] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fe8ef2-687d-368f-c609-e5b2529d8d72, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.307730] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.307950] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.308243] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Deleting the datastore file [datastore1] 1e367665-1d4b-4686-ac79-c946423c1762 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.308536] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1684a0a-e1c3-406f-af66-50e178cc2f63 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.317710] env[62383]: DEBUG oslo_vmware.api [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 936.317710] env[62383]: value = "task-2451965" [ 936.317710] env[62383]: _type = "Task" [ 936.317710] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.326114] env[62383]: DEBUG oslo_vmware.api [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451965, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.658256] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9e8246-5301-40b0-b5b8-9b9964f18dcf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.665908] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7884cc-fc0d-4a8d-8ca5-2d653c70157a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.697529] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79c6b70-6e3a-4ec1-88eb-4432299f7725 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.705325] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711e6e18-59d6-42e1-9eb6-0040df351d16 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.719254] env[62383]: DEBUG nova.compute.provider_tree [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.755368] env[62383]: DEBUG oslo_concurrency.lockutils [req-98680f67-8d55-49f7-9186-893d6fb200e8 req-7f5d4f5c-3a67-4df4-8dc8-3d727a6ff8ad service nova] Releasing lock "refresh_cache-f193af26-eba8-471f-a00e-0afa9b190d0b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.755740] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquired lock "refresh_cache-f193af26-eba8-471f-a00e-0afa9b190d0b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.755901] env[62383]: DEBUG nova.network.neutron [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.802914] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fe8ef2-687d-368f-c609-e5b2529d8d72, 'name': SearchDatastore_Task, 'duration_secs': 0.017039} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.803154] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.803440] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] df5e6f1b-ac01-4ac0-bc84-b49c54c3e771/df5e6f1b-ac01-4ac0-bc84-b49c54c3e771.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 936.803762] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bf921f49-e451-4760-8de9-e368a811942b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.812139] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 936.812139] env[62383]: value = "task-2451966" [ 936.812139] env[62383]: _type = "Task" [ 936.812139] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.820574] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451966, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.829204] env[62383]: DEBUG oslo_vmware.api [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2451965, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220555} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.829432] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.829614] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.829795] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.829970] env[62383]: INFO nova.compute.manager [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Took 1.11 seconds to destroy the instance on the hypervisor. [ 936.830870] env[62383]: DEBUG oslo.service.loopingcall [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.830870] env[62383]: DEBUG nova.compute.manager [-] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 936.830870] env[62383]: DEBUG nova.network.neutron [-] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 937.223105] env[62383]: DEBUG nova.scheduler.client.report [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 937.298466] env[62383]: DEBUG nova.compute.manager [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 937.302936] env[62383]: DEBUG nova.network.neutron [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 937.320739] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451966, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.337947] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 937.338226] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 937.338387] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 937.338572] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 937.338718] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 937.338890] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 937.339365] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 
tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 937.339537] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 937.339707] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 937.339869] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 937.340224] env[62383]: DEBUG nova.virt.hardware [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 937.341096] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e489454-33fc-4d68-80e9-b9750ef96a66 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.349547] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41537cd-2c67-4b48-bc2d-22a5b574a4e7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.470136] env[62383]: DEBUG nova.network.neutron [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Updating instance_info_cache with network_info: [{"id": "8e058335-a5bd-4e58-ab6b-9d03b28c03c3", "address": "fa:16:3e:47:2b:03", "network": {"id": "52e1a5bc-6a0a-4a80-94bf-e90c9631499f", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1823060552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "836aef2835c34521a4fe9a9446deb809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e058335-a5", "ovs_interfaceid": "8e058335-a5bd-4e58-ab6b-9d03b28c03c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.633590] env[62383]: DEBUG nova.compute.manager [req-f99f3b82-e082-49a1-b3cf-3cd97c1aec5d req-6790261f-6591-4581-b40a-49abb6d16558 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Received event network-vif-deleted-e09c6085-476c-4c95-a6e0-1175a4786e4d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 937.633783] env[62383]: INFO nova.compute.manager [req-f99f3b82-e082-49a1-b3cf-3cd97c1aec5d req-6790261f-6591-4581-b40a-49abb6d16558 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Neutron deleted interface e09c6085-476c-4c95-a6e0-1175a4786e4d; detaching it from the instance and deleting it from the info cache [ 937.633958] env[62383]: DEBUG nova.network.neutron [req-f99f3b82-e082-49a1-b3cf-3cd97c1aec5d req-6790261f-6591-4581-b40a-49abb6d16558 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.732693] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.733331] env[62383]: DEBUG nova.compute.manager [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 937.736382] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.592s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.736670] env[62383]: DEBUG nova.objects.instance [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 937.747009] env[62383]: DEBUG nova.network.neutron [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Successfully updated port: d1287bdf-752f-4429-ad35-e47c6ef4804a {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 937.822161] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451966, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517266} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.822161] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] df5e6f1b-ac01-4ac0-bc84-b49c54c3e771/df5e6f1b-ac01-4ac0-bc84-b49c54c3e771.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 937.822161] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 937.822471] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04b31578-5de9-4672-b9bc-70f3a554ecf4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.829082] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 937.829082] env[62383]: value = "task-2451967" [ 937.829082] env[62383]: _type = "Task" [ 937.829082] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.836451] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451967, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.973034] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Releasing lock "refresh_cache-f193af26-eba8-471f-a00e-0afa9b190d0b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.973034] env[62383]: DEBUG nova.compute.manager [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Instance network_info: |[{"id": "8e058335-a5bd-4e58-ab6b-9d03b28c03c3", "address": "fa:16:3e:47:2b:03", "network": {"id": "52e1a5bc-6a0a-4a80-94bf-e90c9631499f", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1823060552-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "836aef2835c34521a4fe9a9446deb809", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4c8c8fd-baca-4e60-97dc-ff0418d63215", "external-id": "nsx-vlan-transportzone-178", "segmentation_id": 178, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e058335-a5", "ovs_interfaceid": "8e058335-a5bd-4e58-ab6b-9d03b28c03c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 937.973470] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:2b:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4c8c8fd-baca-4e60-97dc-ff0418d63215', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e058335-a5bd-4e58-ab6b-9d03b28c03c3', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 937.981126] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Creating folder: Project (836aef2835c34521a4fe9a9446deb809). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 937.981927] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a56a156c-8beb-4f37-90cb-c9181fe42fe8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.994042] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Created folder: Project (836aef2835c34521a4fe9a9446deb809) in parent group-v496304. [ 937.994266] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Creating folder: Instances. Parent ref: group-v496529. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 937.994511] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce5a5144-9fb2-4a30-a862-cc61c487539a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.004102] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Created folder: Instances in parent group-v496529. [ 938.004102] env[62383]: DEBUG oslo.service.loopingcall [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 938.004102] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 938.004841] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c6527c2-fcd4-40a6-b0b0-d2527949e244 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.024756] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 938.024756] env[62383]: value = "task-2451970" [ 938.024756] env[62383]: _type = "Task" [ 938.024756] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.032926] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451970, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.117030] env[62383]: DEBUG nova.network.neutron [-] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.138939] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52d7aa85-ed96-42ba-a0e5-317b5b02b6a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.149458] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1d57c8-a005-4dd8-af1b-3a6ca45d2142 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.181963] env[62383]: DEBUG nova.compute.manager [req-f99f3b82-e082-49a1-b3cf-3cd97c1aec5d req-6790261f-6591-4581-b40a-49abb6d16558 service nova] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Detach interface failed, port_id=e09c6085-476c-4c95-a6e0-1175a4786e4d, reason: Instance 1e367665-1d4b-4686-ac79-c946423c1762 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 938.246382] env[62383]: DEBUG nova.compute.utils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 938.248696] env[62383]: DEBUG nova.compute.manager [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 938.249218] env[62383]: DEBUG nova.network.neutron [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 938.252882] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "refresh_cache-1ab60ef9-4209-4097-8a2c-a55e3a6684b2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.253058] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "refresh_cache-1ab60ef9-4209-4097-8a2c-a55e3a6684b2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.253202] env[62383]: DEBUG nova.network.neutron [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 938.292057] env[62383]: DEBUG nova.policy [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6324b3a4f5a24752b0bef1b5d79ea2ff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fabc88f824a44c57b19a07a605fb89fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 938.341293] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076124} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.341577] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 938.342360] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33171f68-5182-4a6c-8c7e-d7a666707a83 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.365982] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] df5e6f1b-ac01-4ac0-bc84-b49c54c3e771/df5e6f1b-ac01-4ac0-bc84-b49c54c3e771.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 938.366319] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23e28b55-4e8f-4315-8557-7b107bf5e4f0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.385917] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 938.385917] env[62383]: value = "task-2451971" [ 938.385917] env[62383]: _type = "Task" [ 938.385917] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.394283] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451971, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.534397] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451970, 'name': CreateVM_Task, 'duration_secs': 0.36012} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.534570] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 938.535262] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.535426] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.535747] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 938.535993] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d199a916-3207-4647-9783-f502d88a1b2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.540507] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for the task: (returnval){ [ 938.540507] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5239ff32-36ab-08ae-4c03-ab776915023e" [ 938.540507] env[62383]: _type = "Task" [ 938.540507] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.548281] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5239ff32-36ab-08ae-4c03-ab776915023e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.562540] env[62383]: DEBUG nova.network.neutron [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Successfully created port: 859d075c-6248-4d10-83d3-d2985a960584 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 938.618645] env[62383]: INFO nova.compute.manager [-] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Took 1.79 seconds to deallocate network for instance. 
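[editor's note] The CreateVM_Task sequence recorded above for task-2451970 (Folder.CreateVM_Task is invoked, the task is polled at 0%, then reported "completed successfully" with a duration_secs) is the standard oslo.vmware submit-and-wait pattern. A minimal sketch of that pattern, assuming a pre-built VM ConfigSpec plus folder and resource-pool managed object references (all placeholders, not values taken from this log):

    # Sketch only: folder_ref, config_spec and respool_ref are assumed to be a
    # vim ConfigSpec and managed object references built elsewhere.
    from oslo_vmware import api as vmware_api

    def create_vm(session, folder_ref, config_spec, respool_ref):
        # Submit the asynchronous vSphere task; invoke_api returns a Task moref.
        task = session.invoke_api(session.vim, 'CreateVM_Task',
                                  folder_ref, config=config_spec, pool=respool_ref)
        # Poll until the task reaches 'success' (raises on error); this polling is
        # what emits the "progress is N%" / "completed successfully" DEBUG lines.
        return session.wait_for_task(task)

    # A session like the one created at service start-up (placeholder endpoint/credentials):
    # session = vmware_api.VMwareAPISession('vc.example.test', 'user', 'secret',
    #                                       api_retry_count=10, task_poll_interval=0.5)
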
[ 938.750642] env[62383]: DEBUG oslo_concurrency.lockutils [None req-525f1c82-071c-4601-971b-9447edfadaf4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.751793] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 14.577s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.755714] env[62383]: DEBUG nova.compute.manager [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 938.798202] env[62383]: DEBUG nova.network.neutron [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.895598] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451971, 'name': ReconfigVM_Task, 'duration_secs': 0.33837} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.898055] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Reconfigured VM instance instance-00000052 to attach disk [datastore2] df5e6f1b-ac01-4ac0-bc84-b49c54c3e771/df5e6f1b-ac01-4ac0-bc84-b49c54c3e771.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 938.898699] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d191faf-608d-41da-83fe-98259d06e89e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.904995] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 938.904995] env[62383]: value = "task-2451972" [ 938.904995] env[62383]: _type = "Task" [ 938.904995] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.912347] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451972, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.929629] env[62383]: DEBUG nova.network.neutron [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Updating instance_info_cache with network_info: [{"id": "d1287bdf-752f-4429-ad35-e47c6ef4804a", "address": "fa:16:3e:13:71:1b", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1287bdf-75", "ovs_interfaceid": "d1287bdf-752f-4429-ad35-e47c6ef4804a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.050716] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5239ff32-36ab-08ae-4c03-ab776915023e, 'name': SearchDatastore_Task, 'duration_secs': 0.009027} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.051031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.051284] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 939.051532] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.051679] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.051856] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 939.052132] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12f83768-5099-416a-9dee-a950b7bb1a9e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.061274] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 939.061453] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 939.062154] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-441c70a5-ab4c-4101-b0e8-9d75809373b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.067282] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for the task: (returnval){ [ 939.067282] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5224bde7-86de-644d-4139-6ead1c7a08f1" [ 939.067282] env[62383]: _type = "Task" [ 939.067282] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.075629] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5224bde7-86de-644d-4139-6ead1c7a08f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.126541] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.418670] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451972, 'name': Rename_Task, 'duration_secs': 0.170614} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.419010] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 939.419283] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e7d1166-af71-4eb1-a6d4-1b3f04a495d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.426432] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 939.426432] env[62383]: value = "task-2451973" [ 939.426432] env[62383]: _type = "Task" [ 939.426432] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.435564] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "refresh_cache-1ab60ef9-4209-4097-8a2c-a55e3a6684b2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.435914] env[62383]: DEBUG nova.compute.manager [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Instance network_info: |[{"id": "d1287bdf-752f-4429-ad35-e47c6ef4804a", "address": "fa:16:3e:13:71:1b", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1287bdf-75", "ovs_interfaceid": "d1287bdf-752f-4429-ad35-e47c6ef4804a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 939.436491] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451973, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.439428] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:71:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1287bdf-752f-4429-ad35-e47c6ef4804a', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 939.447013] env[62383]: DEBUG oslo.service.loopingcall [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.447402] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 939.447624] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6bd7990e-1479-4902-abb6-7d337b0cf894 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.470098] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 939.470098] env[62383]: value = "task-2451974" [ 939.470098] env[62383]: _type = "Task" [ 939.470098] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.479632] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451974, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.577127] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5224bde7-86de-644d-4139-6ead1c7a08f1, 'name': SearchDatastore_Task, 'duration_secs': 0.008992} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.580413] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a99d433f-65ed-421a-a59c-42f2cae30764 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.585710] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for the task: (returnval){ [ 939.585710] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]529dfa91-7f6d-5294-9336-9146d1c11c3c" [ 939.585710] env[62383]: _type = "Task" [ 939.585710] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.593524] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529dfa91-7f6d-5294-9336-9146d1c11c3c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.626010] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3a75a2-b102-4df2-a4d0-44a1d329070f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.633854] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6829d32-4923-4cf8-b429-11ecd97e9e41 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.668157] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4aba1bd-d507-4788-8515-cc260a26196d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.672418] env[62383]: DEBUG nova.compute.manager [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Received event network-vif-plugged-d1287bdf-752f-4429-ad35-e47c6ef4804a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 939.672633] env[62383]: DEBUG oslo_concurrency.lockutils [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] Acquiring lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.672847] env[62383]: DEBUG oslo_concurrency.lockutils [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] Lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.673027] env[62383]: DEBUG oslo_concurrency.lockutils [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] Lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.673215] env[62383]: DEBUG nova.compute.manager [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] No waiting events found dispatching network-vif-plugged-d1287bdf-752f-4429-ad35-e47c6ef4804a {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 939.673378] env[62383]: WARNING nova.compute.manager [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Received unexpected event network-vif-plugged-d1287bdf-752f-4429-ad35-e47c6ef4804a for instance with vm_state building and task_state spawning. 
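[editor's note] The Acquiring/Acquired/released lock lines threaded through this section (the per-instance "refresh_cache-..." and "...-events" locks, and the "compute_resources" lock with its waited/held timings) come from oslo.concurrency's lockutils. A short sketch of the two forms that produce those log lines, with hypothetical lock names and placeholder bodies:

    from oslo_concurrency import lockutils

    # Decorator form: serialises all callers on the named lock and logs the
    # acquired-after-waiting / released-after-holding times seen above.
    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        ...  # placeholder critical section

    # Context-manager form, e.g. a per-instance cache lock (name is illustrative).
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            ...  # rebuild the instance network info cache here (placeholder)
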
[ 939.673545] env[62383]: DEBUG nova.compute.manager [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Received event network-changed-d1287bdf-752f-4429-ad35-e47c6ef4804a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 939.673711] env[62383]: DEBUG nova.compute.manager [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Refreshing instance network info cache due to event network-changed-d1287bdf-752f-4429-ad35-e47c6ef4804a. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 939.673895] env[62383]: DEBUG oslo_concurrency.lockutils [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] Acquiring lock "refresh_cache-1ab60ef9-4209-4097-8a2c-a55e3a6684b2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.674064] env[62383]: DEBUG oslo_concurrency.lockutils [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] Acquired lock "refresh_cache-1ab60ef9-4209-4097-8a2c-a55e3a6684b2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.674316] env[62383]: DEBUG nova.network.neutron [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Refreshing network info cache for port d1287bdf-752f-4429-ad35-e47c6ef4804a {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 939.685482] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ccd751-2115-44a8-9380-846a999d3601 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.705786] env[62383]: DEBUG nova.compute.provider_tree [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.768750] env[62383]: DEBUG nova.compute.manager [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 939.796284] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 939.796619] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.796839] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 939.797116] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.797346] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 939.797564] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 939.797847] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 939.798097] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 939.798346] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Got 1 possible 
topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 939.798584] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 939.798830] env[62383]: DEBUG nova.virt.hardware [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 939.799734] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35ddc1f-8a21-497c-ba0b-e9ed1017f19a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.807253] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02442387-0daf-4280-b4dd-14f72bd5c07d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.936601] env[62383]: DEBUG oslo_vmware.api [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451973, 'name': PowerOnVM_Task, 'duration_secs': 0.493193} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.936924] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.937158] env[62383]: INFO nova.compute.manager [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Took 7.65 seconds to spawn the instance on the hypervisor. [ 939.937338] env[62383]: DEBUG nova.compute.manager [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 939.938117] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cfed12-1795-41ff-b46f-fea087267a93 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.980536] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451974, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.053224] env[62383]: DEBUG nova.network.neutron [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Successfully updated port: 859d075c-6248-4d10-83d3-d2985a960584 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 940.096077] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529dfa91-7f6d-5294-9336-9146d1c11c3c, 'name': SearchDatastore_Task, 'duration_secs': 0.013319} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.096347] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.096601] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] f193af26-eba8-471f-a00e-0afa9b190d0b/f193af26-eba8-471f-a00e-0afa9b190d0b.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 940.096852] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1a6b2b7-7845-4fd1-b797-c63e8b682f39 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.103872] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for the task: (returnval){ [ 940.103872] env[62383]: value = "task-2451975" [ 940.103872] env[62383]: _type = "Task" [ 940.103872] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.112503] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451975, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.208172] env[62383]: DEBUG nova.scheduler.client.report [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.397649] env[62383]: DEBUG nova.network.neutron [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Updated VIF entry in instance network info cache for port d1287bdf-752f-4429-ad35-e47c6ef4804a. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 940.398133] env[62383]: DEBUG nova.network.neutron [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Updating instance_info_cache with network_info: [{"id": "d1287bdf-752f-4429-ad35-e47c6ef4804a", "address": "fa:16:3e:13:71:1b", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1287bdf-75", "ovs_interfaceid": "d1287bdf-752f-4429-ad35-e47c6ef4804a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.456272] env[62383]: INFO nova.compute.manager [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Took 25.43 seconds to build instance. [ 940.483909] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451974, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.558482] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "refresh_cache-161d6537-fe78-4a42-b8a5-e3d7d78c0154" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.558682] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "refresh_cache-161d6537-fe78-4a42-b8a5-e3d7d78c0154" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.558927] env[62383]: DEBUG nova.network.neutron [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.615055] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451975, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504508} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.615388] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] f193af26-eba8-471f-a00e-0afa9b190d0b/f193af26-eba8-471f-a00e-0afa9b190d0b.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 940.615602] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 940.615848] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f211ec1-6097-41dd-b2c9-81bf39aaef7e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.621939] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for the task: (returnval){ [ 940.621939] env[62383]: value = "task-2451976" [ 940.621939] env[62383]: _type = "Task" [ 940.621939] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.630374] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451976, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.901145] env[62383]: DEBUG oslo_concurrency.lockutils [req-3afb545f-4887-485f-b6cc-81760350f0b5 req-fb17d247-0998-40ed-aec6-6f27f0ad93a5 service nova] Releasing lock "refresh_cache-1ab60ef9-4209-4097-8a2c-a55e3a6684b2" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.957623] env[62383]: DEBUG oslo_concurrency.lockutils [None req-246f1db3-ed39-4c22-876d-c907c335944b tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.939s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.981847] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451974, 'name': CreateVM_Task, 'duration_secs': 1.038054} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.982044] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 940.982713] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 940.982881] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.983224] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 940.983482] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aacb916c-a117-4bf7-b4ed-7e31b95083a0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.987900] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 940.987900] env[62383]: value = 
"session[526c6062-9206-ac03-b2da-fd469a7c1551]52a3b82c-a508-bdea-05ac-88d1738abbfb" [ 940.987900] env[62383]: _type = "Task" [ 940.987900] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.995988] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a3b82c-a508-bdea-05ac-88d1738abbfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.091881] env[62383]: DEBUG nova.network.neutron [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 941.131411] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077676} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.131757] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 941.134616] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b9d900-9e0e-474a-b88b-0db24f8e44b9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.157399] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] f193af26-eba8-471f-a00e-0afa9b190d0b/f193af26-eba8-471f-a00e-0afa9b190d0b.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.157518] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-864dc961-0078-4c76-9b75-1df3bb4c217d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.179223] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for the task: (returnval){ [ 941.179223] env[62383]: value = "task-2451977" [ 941.179223] env[62383]: _type = "Task" [ 941.179223] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.187755] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451977, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.221750] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.470s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.224388] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.571s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.225570] env[62383]: DEBUG nova.objects.instance [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lazy-loading 'resources' on Instance uuid 7740a70f-3c95-49aa-b3ec-0e0effd3efcc {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 941.252207] env[62383]: DEBUG nova.network.neutron [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Updating instance_info_cache with network_info: [{"id": "859d075c-6248-4d10-83d3-d2985a960584", "address": "fa:16:3e:88:a1:6c", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap859d075c-62", "ovs_interfaceid": "859d075c-6248-4d10-83d3-d2985a960584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.433420] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.433706] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.433919] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.434121] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.434299] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.436498] env[62383]: INFO nova.compute.manager [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Terminating instance [ 941.497729] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a3b82c-a508-bdea-05ac-88d1738abbfb, 'name': SearchDatastore_Task, 'duration_secs': 0.009651} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.498059] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.498322] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 941.498560] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.498721] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.498930] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.499211] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-025c0f8d-84b5-448c-82cb-6e75af0b9918 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.507144] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.507320] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 941.508061] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45aa5afd-e9fc-49b0-9b70-3a8f3d9f09df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.515128] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 941.515128] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ae13fa-f97e-c52e-cda6-8ac05739c7dc" [ 941.515128] env[62383]: _type = "Task" [ 941.515128] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.523161] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ae13fa-f97e-c52e-cda6-8ac05739c7dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.687869] env[62383]: DEBUG nova.compute.manager [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Received event network-vif-plugged-859d075c-6248-4d10-83d3-d2985a960584 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 941.688132] env[62383]: DEBUG oslo_concurrency.lockutils [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] Acquiring lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.688295] env[62383]: DEBUG oslo_concurrency.lockutils [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] Lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.688451] env[62383]: DEBUG oslo_concurrency.lockutils [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] Lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.688614] env[62383]: DEBUG nova.compute.manager [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] No waiting events found dispatching network-vif-plugged-859d075c-6248-4d10-83d3-d2985a960584 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 941.688775] env[62383]: WARNING nova.compute.manager [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Received unexpected event network-vif-plugged-859d075c-6248-4d10-83d3-d2985a960584 for instance with 
vm_state building and task_state spawning. [ 941.688963] env[62383]: DEBUG nova.compute.manager [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Received event network-changed-859d075c-6248-4d10-83d3-d2985a960584 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 941.689178] env[62383]: DEBUG nova.compute.manager [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Refreshing instance network info cache due to event network-changed-859d075c-6248-4d10-83d3-d2985a960584. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 941.689346] env[62383]: DEBUG oslo_concurrency.lockutils [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] Acquiring lock "refresh_cache-161d6537-fe78-4a42-b8a5-e3d7d78c0154" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.692990] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451977, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.754389] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "refresh_cache-161d6537-fe78-4a42-b8a5-e3d7d78c0154" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.754701] env[62383]: DEBUG nova.compute.manager [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Instance network_info: |[{"id": "859d075c-6248-4d10-83d3-d2985a960584", "address": "fa:16:3e:88:a1:6c", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap859d075c-62", "ovs_interfaceid": "859d075c-6248-4d10-83d3-d2985a960584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 941.754987] env[62383]: DEBUG oslo_concurrency.lockutils [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] Acquired lock 
"refresh_cache-161d6537-fe78-4a42-b8a5-e3d7d78c0154" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.755190] env[62383]: DEBUG nova.network.neutron [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Refreshing network info cache for port 859d075c-6248-4d10-83d3-d2985a960584 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 941.756871] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:a1:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5f60c972-a72d-4c5f-a250-faadfd6eafbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '859d075c-6248-4d10-83d3-d2985a960584', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 941.764667] env[62383]: DEBUG oslo.service.loopingcall [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.768112] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 941.769303] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d54588d9-2d7e-4b53-a2a4-09a05b0e594a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.787802] env[62383]: INFO nova.scheduler.client.report [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted allocation for migration f38f2250-33df-4650-ba7b-2012a4623baa [ 941.792643] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.792643] env[62383]: value = "task-2451978" [ 941.792643] env[62383]: _type = "Task" [ 941.792643] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.803111] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451978, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.940064] env[62383]: DEBUG nova.compute.manager [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 941.940362] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.941281] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f592ab40-63f2-47a2-9845-4daddd862a0b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.950941] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.951200] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a63d0899-e7e9-46ae-9bf8-9f85987f142a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.957590] env[62383]: DEBUG oslo_vmware.api [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 941.957590] env[62383]: value = "task-2451979" [ 941.957590] env[62383]: _type = "Task" [ 941.957590] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.965734] env[62383]: DEBUG oslo_vmware.api [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451979, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.027627] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ae13fa-f97e-c52e-cda6-8ac05739c7dc, 'name': SearchDatastore_Task, 'duration_secs': 0.010173} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.028368] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc2c3452-5a84-45c8-ada9-412d4966499c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.034250] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 942.034250] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52eb0a85-79d1-89ee-92a8-9ef841f3b644" [ 942.034250] env[62383]: _type = "Task" [ 942.034250] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.041955] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52eb0a85-79d1-89ee-92a8-9ef841f3b644, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.068122] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f855150a-3bf3-4093-b317-d80890a88233 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.075392] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bae9643-6e33-421a-a631-b75396e82c57 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.107687] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376b9c6a-1922-41ba-b202-52a7d516d5fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.116555] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49317b7e-477c-4759-9456-666611852bd5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.134111] env[62383]: DEBUG nova.compute.provider_tree [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.191461] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451977, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.294214] env[62383]: DEBUG oslo_concurrency.lockutils [None req-abecc734-e736-4f5c-b251-c3c30ce2392b tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 21.984s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.306294] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2451978, 'name': CreateVM_Task, 'duration_secs': 0.415103} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.306460] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.307145] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.307312] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.307635] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 942.307885] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9eaa87fe-6eea-4559-a0ec-5eac99fb1fd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.312118] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 942.312118] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b18d38-5ca9-d667-079b-114050e2aa82" [ 942.312118] env[62383]: _type = "Task" [ 942.312118] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.320457] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b18d38-5ca9-d667-079b-114050e2aa82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.466342] env[62383]: DEBUG oslo_vmware.api [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451979, 'name': PowerOffVM_Task, 'duration_secs': 0.275201} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.467454] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.467454] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 942.467454] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24f79cba-a591-4293-bc0f-ca68ea7ed199 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.482695] env[62383]: DEBUG nova.network.neutron [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Updated VIF entry in instance network info cache for port 859d075c-6248-4d10-83d3-d2985a960584. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 942.482695] env[62383]: DEBUG nova.network.neutron [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Updating instance_info_cache with network_info: [{"id": "859d075c-6248-4d10-83d3-d2985a960584", "address": "fa:16:3e:88:a1:6c", "network": {"id": "3862c455-1703-4121-8e67-dd45a8650b5c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-139397342-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fabc88f824a44c57b19a07a605fb89fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5f60c972-a72d-4c5f-a250-faadfd6eafbe", "external-id": "nsx-vlan-transportzone-932", "segmentation_id": 932, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap859d075c-62", "ovs_interfaceid": "859d075c-6248-4d10-83d3-d2985a960584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.540676] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 942.541050] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] 
[instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 942.541050] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleting the datastore file [datastore2] df5e6f1b-ac01-4ac0-bc84-b49c54c3e771 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 942.541617] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8bf8dc7-72c3-4d37-a9e5-47c94b387a85 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.546299] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52eb0a85-79d1-89ee-92a8-9ef841f3b644, 'name': SearchDatastore_Task, 'duration_secs': 0.013075} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.546837] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.547116] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 1ab60ef9-4209-4097-8a2c-a55e3a6684b2/1ab60ef9-4209-4097-8a2c-a55e3a6684b2.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 942.547405] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1027d9d-a4c0-439b-9148-7d92205fc852 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.550505] env[62383]: DEBUG oslo_vmware.api [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 942.550505] env[62383]: value = "task-2451981" [ 942.550505] env[62383]: _type = "Task" [ 942.550505] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.554960] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 942.554960] env[62383]: value = "task-2451982" [ 942.554960] env[62383]: _type = "Task" [ 942.554960] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.561513] env[62383]: DEBUG oslo_vmware.api [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451981, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.566081] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451982, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.636051] env[62383]: DEBUG nova.scheduler.client.report [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 942.692637] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451977, 'name': ReconfigVM_Task, 'duration_secs': 1.035663} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.692813] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Reconfigured VM instance instance-00000053 to attach disk [datastore2] f193af26-eba8-471f-a00e-0afa9b190d0b/f193af26-eba8-471f-a00e-0afa9b190d0b.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 942.693414] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d97bfcf-55e2-4421-9b29-b7e0730606ec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.702021] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for the task: (returnval){ [ 942.702021] env[62383]: value = "task-2451983" [ 942.702021] env[62383]: _type = "Task" [ 942.702021] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.708784] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451983, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.828656] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b18d38-5ca9-d667-079b-114050e2aa82, 'name': SearchDatastore_Task, 'duration_secs': 0.01096} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.829123] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.829444] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.829743] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.829903] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.830100] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.830392] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8856e73-f760-4a0b-a4f3-c11eb047bc4b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.843876] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.844098] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.844873] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6b2073e-4aa9-47cd-92b4-89f823afa00f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.855141] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 942.855141] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52912396-b990-ff36-818b-8a4b7259ff97" [ 942.855141] env[62383]: _type = "Task" [ 942.855141] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.864905] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52912396-b990-ff36-818b-8a4b7259ff97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.985034] env[62383]: DEBUG oslo_concurrency.lockutils [req-05390140-505d-426f-91a2-da64c6206a0a req-b0637593-1aac-4196-a8ad-7d2f351528ac service nova] Releasing lock "refresh_cache-161d6537-fe78-4a42-b8a5-e3d7d78c0154" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.065185] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451982, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.068838] env[62383]: DEBUG oslo_vmware.api [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2451981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198237} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.069162] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 943.069354] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 943.069528] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 943.069702] env[62383]: INFO nova.compute.manager [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Took 1.13 seconds to destroy the instance on the hypervisor. [ 943.069961] env[62383]: DEBUG oslo.service.loopingcall [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 943.070159] env[62383]: DEBUG nova.compute.manager [-] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 943.070252] env[62383]: DEBUG nova.network.neutron [-] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 943.141804] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.917s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.144619] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.179s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.145442] env[62383]: INFO nova.compute.claims [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.173894] env[62383]: INFO nova.scheduler.client.report [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted allocations for instance 7740a70f-3c95-49aa-b3ec-0e0effd3efcc [ 943.211555] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451983, 'name': Rename_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.243680] env[62383]: DEBUG oslo_concurrency.lockutils [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "81921762-ac51-42d2-83dc-d5b6e904fbb7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.243680] env[62383]: DEBUG oslo_concurrency.lockutils [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.243680] env[62383]: DEBUG oslo_concurrency.lockutils [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "81921762-ac51-42d2-83dc-d5b6e904fbb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.243680] env[62383]: DEBUG oslo_concurrency.lockutils [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.243680] env[62383]: DEBUG oslo_concurrency.lockutils [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.245768] env[62383]: INFO nova.compute.manager [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Terminating instance [ 943.365606] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52912396-b990-ff36-818b-8a4b7259ff97, 'name': SearchDatastore_Task, 'duration_secs': 0.069935} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.366397] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-305548cb-9400-46f7-b50c-31779b6effe3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.371371] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 943.371371] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52477d3e-fb16-0be2-18d3-d930eceb98d0" [ 943.371371] env[62383]: _type = "Task" [ 943.371371] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.380062] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52477d3e-fb16-0be2-18d3-d930eceb98d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.565807] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451982, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542412} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.566132] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 1ab60ef9-4209-4097-8a2c-a55e3a6684b2/1ab60ef9-4209-4097-8a2c-a55e3a6684b2.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 943.566295] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 943.566742] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ceb53fb-833e-448c-ba97-76d8c7d18889 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.572283] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 943.572283] env[62383]: value = "task-2451984" [ 943.572283] env[62383]: _type = "Task" [ 943.572283] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.580780] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451984, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.684819] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78aafd7d-b0b0-4d15-9941-1189e878f36d tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "7740a70f-3c95-49aa-b3ec-0e0effd3efcc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.030s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.712431] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451983, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.714910] env[62383]: DEBUG nova.compute.manager [req-188cf221-bb3e-4381-89a7-a2fdf9bd3b63 req-461cb467-e545-405f-8878-c515ca0c7393 service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Received event network-vif-deleted-f5edd1b5-561e-477f-99c4-66985de210cd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 943.715115] env[62383]: INFO nova.compute.manager [req-188cf221-bb3e-4381-89a7-a2fdf9bd3b63 req-461cb467-e545-405f-8878-c515ca0c7393 service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Neutron deleted interface f5edd1b5-561e-477f-99c4-66985de210cd; detaching it from the instance and deleting it from the info cache [ 943.715286] env[62383]: DEBUG nova.network.neutron [req-188cf221-bb3e-4381-89a7-a2fdf9bd3b63 req-461cb467-e545-405f-8878-c515ca0c7393 service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.750217] env[62383]: DEBUG nova.compute.manager [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 943.750437] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 943.754103] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e762d088-e954-442f-b704-f6380512e293 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.756991] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "8994780e-1b8f-4464-a303-a1e68206e770" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.757183] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "8994780e-1b8f-4464-a303-a1e68206e770" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.763141] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.763816] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81e6df7e-afb4-4aad-a24f-e3be0d0016b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.769676] env[62383]: DEBUG oslo_vmware.api [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 943.769676] env[62383]: value = "task-2451985" [ 943.769676] env[62383]: _type = "Task" [ 943.769676] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.777741] env[62383]: DEBUG oslo_vmware.api [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451985, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.840415] env[62383]: DEBUG nova.network.neutron [-] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.881167] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52477d3e-fb16-0be2-18d3-d930eceb98d0, 'name': SearchDatastore_Task, 'duration_secs': 0.010574} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.881412] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.881693] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 161d6537-fe78-4a42-b8a5-e3d7d78c0154/161d6537-fe78-4a42-b8a5-e3d7d78c0154.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 943.881960] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f03f1917-2d14-4fb8-b97a-d2a7d79d429e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.888137] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 943.888137] env[62383]: value = "task-2451986" [ 943.888137] env[62383]: _type = "Task" [ 943.888137] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.895748] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451986, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.082800] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451984, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073942} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.083145] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 944.083993] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddeca80d-447f-4cfe-83ac-39d45c9dc810 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.107832] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 1ab60ef9-4209-4097-8a2c-a55e3a6684b2/1ab60ef9-4209-4097-8a2c-a55e3a6684b2.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.108190] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bfa71b5-ec3d-409e-a9f8-0dc761fe0505 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.130025] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 944.130025] env[62383]: value = "task-2451987" [ 944.130025] env[62383]: _type = "Task" [ 944.130025] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.140134] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451987, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.216340] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451983, 'name': Rename_Task, 'duration_secs': 1.173525} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.216628] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 944.216936] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10260577-487b-40bb-ac33-1f0e5fef08f6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.218977] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-769be16d-789e-45ce-846b-a0f69adea54c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.226936] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for the task: (returnval){ [ 944.226936] env[62383]: value = "task-2451988" [ 944.226936] env[62383]: _type = "Task" [ 944.226936] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.234048] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7713c1a-6172-4edd-a4d3-bb7d70fa84f0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.254256] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451988, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.260052] env[62383]: DEBUG nova.compute.manager [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 944.277511] env[62383]: DEBUG nova.compute.manager [req-188cf221-bb3e-4381-89a7-a2fdf9bd3b63 req-461cb467-e545-405f-8878-c515ca0c7393 service nova] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Detach interface failed, port_id=f5edd1b5-561e-477f-99c4-66985de210cd, reason: Instance df5e6f1b-ac01-4ac0-bc84-b49c54c3e771 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 944.292059] env[62383]: DEBUG oslo_vmware.api [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451985, 'name': PowerOffVM_Task, 'duration_secs': 0.200232} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.293060] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.293107] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.293611] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1eae00d4-41e0-472c-8547-6a5dde351209 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.343620] env[62383]: INFO nova.compute.manager [-] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Took 1.27 seconds to deallocate network for instance. [ 944.368381] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 944.368583] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 944.368777] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleting the datastore file [datastore2] 81921762-ac51-42d2-83dc-d5b6e904fbb7 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.369395] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6e68ff4-437d-42e0-95df-95a2641d1234 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.378868] env[62383]: DEBUG oslo_vmware.api [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 944.378868] env[62383]: value = "task-2451990" [ 944.378868] env[62383]: _type = "Task" [ 944.378868] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.390127] env[62383]: DEBUG oslo_vmware.api [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451990, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.398283] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451986, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47376} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.401034] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 161d6537-fe78-4a42-b8a5-e3d7d78c0154/161d6537-fe78-4a42-b8a5-e3d7d78c0154.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 944.401300] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.402040] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e181ce9a-c274-4b51-b5f2-9572a9b4ca2b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.408504] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 944.408504] env[62383]: value = "task-2451991" [ 944.408504] env[62383]: _type = "Task" [ 944.408504] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.421349] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451991, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.545901] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6106afc-ac60-4f2c-83f7-78daaece7840 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.554336] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce446fe-db28-4985-a476-488ccf80c661 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.586137] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90108cb4-9f71-4f4b-804d-e31a86e09565 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.593362] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58dced24-786d-4452-82e9-02e6e5592540 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.606604] env[62383]: DEBUG nova.compute.provider_tree [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.639836] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451987, 'name': ReconfigVM_Task, 'duration_secs': 0.364095} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.639836] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 1ab60ef9-4209-4097-8a2c-a55e3a6684b2/1ab60ef9-4209-4097-8a2c-a55e3a6684b2.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 944.640502] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee146193-093d-4d65-ae05-e4d1f94e8eb7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.647685] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 944.647685] env[62383]: value = "task-2451992" [ 944.647685] env[62383]: _type = "Task" [ 944.647685] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.658868] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451992, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.738418] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451988, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.799950] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.850996] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.889994] env[62383]: DEBUG oslo_vmware.api [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2451990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228654} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.890258] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.890447] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 944.890630] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 944.890814] env[62383]: INFO nova.compute.manager [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 944.891079] env[62383]: DEBUG oslo.service.loopingcall [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.891289] env[62383]: DEBUG nova.compute.manager [-] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 944.891373] env[62383]: DEBUG nova.network.neutron [-] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 944.918911] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076383} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.920193] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 944.920193] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5d17ad-5650-47f2-996a-12116c08709c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.949671] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 161d6537-fe78-4a42-b8a5-e3d7d78c0154/161d6537-fe78-4a42-b8a5-e3d7d78c0154.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.952033] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59826c0e-4884-49c1-a83f-10e61a208097 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.971488] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 944.971488] env[62383]: value = "task-2451993" [ 944.971488] env[62383]: _type = "Task" [ 944.971488] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.979017] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451993, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.110592] env[62383]: DEBUG nova.scheduler.client.report [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.160161] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451992, 'name': Rename_Task, 'duration_secs': 0.157788} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.160327] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.160561] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d490a15-96bc-44ad-b785-ce45c09570a3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.167443] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 945.167443] env[62383]: value = "task-2451994" [ 945.167443] env[62383]: _type = "Task" [ 945.167443] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.177313] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451994, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.239081] env[62383]: DEBUG oslo_vmware.api [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451988, 'name': PowerOnVM_Task, 'duration_secs': 0.581021} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.239081] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 945.239081] env[62383]: INFO nova.compute.manager [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Took 10.40 seconds to spawn the instance on the hypervisor. [ 945.239081] env[62383]: DEBUG nova.compute.manager [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 945.239739] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a71195-9373-41da-8ff3-51f921c7d769 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.482028] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451993, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.616549] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.618029] env[62383]: DEBUG nova.compute.manager [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 945.619925] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.969s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.620673] env[62383]: DEBUG nova.objects.instance [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lazy-loading 'resources' on Instance uuid 23d24da6-c7d8-4d6a-8442-a1066505aab1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.648296] env[62383]: DEBUG nova.network.neutron [-] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.677763] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451994, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.742660] env[62383]: DEBUG nova.compute.manager [req-e67365d0-1337-4eab-9b33-399d0324a7e9 req-19e1b2f9-e5a6-4085-a3ae-a9f8e4602103 service nova] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Received event network-vif-deleted-913ca293-96ad-478e-96f7-b0b1697a3b0d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 945.755322] env[62383]: INFO nova.compute.manager [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Took 29.77 seconds to build instance. [ 945.982097] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451993, 'name': ReconfigVM_Task, 'duration_secs': 0.750579} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.982465] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 161d6537-fe78-4a42-b8a5-e3d7d78c0154/161d6537-fe78-4a42-b8a5-e3d7d78c0154.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.983370] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b5ddfd8a-e992-4157-821d-657dd4bab739 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.989802] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 945.989802] env[62383]: value = "task-2451995" [ 945.989802] env[62383]: _type = "Task" [ 945.989802] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.002267] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451995, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.122874] env[62383]: DEBUG nova.compute.utils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 946.127468] env[62383]: DEBUG nova.compute.manager [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 946.127665] env[62383]: DEBUG nova.network.neutron [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.151676] env[62383]: INFO nova.compute.manager [-] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Took 1.26 seconds to deallocate network for instance. [ 946.180157] env[62383]: DEBUG oslo_vmware.api [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451994, 'name': PowerOnVM_Task, 'duration_secs': 0.707145} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.180331] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.180529] env[62383]: INFO nova.compute.manager [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Took 8.88 seconds to spawn the instance on the hypervisor. [ 946.180818] env[62383]: DEBUG nova.compute.manager [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 946.181484] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02cbc13b-6343-471f-98a0-0963d413b511 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.187317] env[62383]: DEBUG nova.policy [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc391aae95a8405bab7801175514ac8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c15955328966463fa09401a270d95fe0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 946.258153] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6587f7b1-9da2-4365-916c-ad65a9fb8a5c tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Lock "f193af26-eba8-471f-a00e-0afa9b190d0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.283s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.465453] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9c7377-9428-450c-8d00-4482e092813e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.473353] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b616a1-4845-411d-80c0-191bd00e9478 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.508791] env[62383]: DEBUG nova.network.neutron [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Successfully created port: d7087f7e-68a8-4f14-b9a5-db8db1732dc9 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 946.514131] env[62383]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19b5650-1bb8-4a1a-b0c6-28e7c8f17292 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.521443] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451995, 'name': Rename_Task, 'duration_secs': 0.179696} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.523460] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.523754] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66f6d621-79c7-410f-84f9-abaffc1cb8ce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.526044] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7e008a-4a52-4844-a48f-9b7635039c74 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.539498] env[62383]: DEBUG nova.compute.provider_tree [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.541988] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 946.541988] env[62383]: value = "task-2451996" [ 946.541988] env[62383]: _type = "Task" [ 946.541988] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.550494] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451996, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.630962] env[62383]: DEBUG nova.compute.manager [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 946.661430] env[62383]: DEBUG oslo_concurrency.lockutils [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.703793] env[62383]: INFO nova.compute.manager [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Took 28.70 seconds to build instance. [ 947.044039] env[62383]: DEBUG nova.scheduler.client.report [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.057584] env[62383]: DEBUG oslo_vmware.api [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2451996, 'name': PowerOnVM_Task, 'duration_secs': 0.518007} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.057741] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.058116] env[62383]: INFO nova.compute.manager [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Took 7.29 seconds to spawn the instance on the hypervisor. 
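The records above all follow one pattern: nova invokes a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), then oslo.vmware's wait_for_task polls it, logging "progress is N%" until the task is reported as completed successfully or as failed. The sketch below is a minimal, simplified illustration of that polling loop only; poll_task_state and TaskFailed are hypothetical stand-ins for this example and are not the real oslo.vmware API.

import time


class TaskFailed(Exception):
    """Raised when the remote task ends in an error state (illustrative only)."""


def wait_for_task(poll_task_state, interval=0.5, timeout=300.0):
    """Poll a remote task until it succeeds, fails, or times out.

    poll_task_state() is a hypothetical callable returning a
    (state, progress) tuple, e.g. ("running", 33) or ("success", 100),
    mirroring the "progress is N%" / "completed successfully" entries above.
    """
    deadline = time.monotonic() + timeout
    while True:
        state, progress = poll_task_state()
        if state == "success":
            return
        if state == "error":
            raise TaskFailed("task finished with an error")
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        # Corresponds to the repeated "_poll_task ... progress is N%" records.
        print(f"task progress is {progress}%")
        time.sleep(interval)


if __name__ == "__main__":
    # Fake task that completes after three polls.
    states = iter([("running", 0), ("running", 66), ("success", 100)])
    wait_for_task(lambda: next(states), interval=0.01)
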
[ 947.058116] env[62383]: DEBUG nova.compute.manager [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.058887] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5fb641-4df0-4d61-8138-2742ba77cea1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.205733] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b79ce85b-7caa-43df-84b0-7b6750097045 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.211s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.367241] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquiring lock "f193af26-eba8-471f-a00e-0afa9b190d0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.367605] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Lock "f193af26-eba8-471f-a00e-0afa9b190d0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.367857] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquiring lock "f193af26-eba8-471f-a00e-0afa9b190d0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.368100] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Lock "f193af26-eba8-471f-a00e-0afa9b190d0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.368287] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Lock "f193af26-eba8-471f-a00e-0afa9b190d0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.371034] env[62383]: INFO nova.compute.manager [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 
tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Terminating instance [ 947.553439] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.933s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.555593] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.836s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.557119] env[62383]: INFO nova.compute.claims [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.576721] env[62383]: INFO nova.compute.manager [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Took 25.32 seconds to build instance. [ 947.585437] env[62383]: INFO nova.scheduler.client.report [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted allocations for instance 23d24da6-c7d8-4d6a-8442-a1066505aab1 [ 947.642382] env[62383]: DEBUG nova.compute.manager [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 947.664796] env[62383]: DEBUG nova.virt.hardware [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 947.665090] env[62383]: DEBUG nova.virt.hardware [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 947.665279] env[62383]: DEBUG nova.virt.hardware [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 947.665472] env[62383]: DEBUG nova.virt.hardware [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 947.665634] env[62383]: DEBUG nova.virt.hardware [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 947.665786] env[62383]: DEBUG nova.virt.hardware [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 947.666021] env[62383]: DEBUG nova.virt.hardware [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 947.666194] env[62383]: DEBUG nova.virt.hardware [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 947.666447] env[62383]: DEBUG nova.virt.hardware [None 
req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 947.666656] env[62383]: DEBUG nova.virt.hardware [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 947.666848] env[62383]: DEBUG nova.virt.hardware [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 947.668231] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567a46d7-ffdf-44ca-b99d-516a97090f14 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.676396] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b64dd61-dd8d-4833-aa4a-a844a451e139 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.875929] env[62383]: DEBUG nova.compute.manager [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 947.876191] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.877155] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecb812a-9a51-464f-9152-ec927f82e216 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.885107] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.885354] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd4443da-c403-4de1-97fa-ee9d1809dd8f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.892432] env[62383]: DEBUG oslo_vmware.api [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for the task: (returnval){ [ 947.892432] env[62383]: value = "task-2451997" [ 947.892432] env[62383]: _type = "Task" [ 947.892432] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.900818] env[62383]: DEBUG oslo_vmware.api [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451997, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.931561] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.931831] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.932050] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.932244] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.932415] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.934568] env[62383]: INFO nova.compute.manager [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Terminating instance [ 947.953648] env[62383]: DEBUG nova.compute.manager [req-702223dd-c710-4a15-a401-521fc1b607b0 req-3f26dc78-a92f-4787-9c77-bd2c8741d23e service nova] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Received event network-vif-plugged-d7087f7e-68a8-4f14-b9a5-db8db1732dc9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 947.953871] env[62383]: DEBUG oslo_concurrency.lockutils [req-702223dd-c710-4a15-a401-521fc1b607b0 req-3f26dc78-a92f-4787-9c77-bd2c8741d23e service nova] Acquiring lock "4136466e-d9c6-448a-b392-415bb7c44a8d-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.954086] env[62383]: DEBUG oslo_concurrency.lockutils [req-702223dd-c710-4a15-a401-521fc1b607b0 req-3f26dc78-a92f-4787-9c77-bd2c8741d23e service nova] Lock "4136466e-d9c6-448a-b392-415bb7c44a8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.954459] env[62383]: DEBUG oslo_concurrency.lockutils [req-702223dd-c710-4a15-a401-521fc1b607b0 req-3f26dc78-a92f-4787-9c77-bd2c8741d23e service nova] Lock "4136466e-d9c6-448a-b392-415bb7c44a8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.954459] env[62383]: DEBUG nova.compute.manager [req-702223dd-c710-4a15-a401-521fc1b607b0 req-3f26dc78-a92f-4787-9c77-bd2c8741d23e service nova] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] No waiting events found dispatching network-vif-plugged-d7087f7e-68a8-4f14-b9a5-db8db1732dc9 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 947.954589] env[62383]: WARNING nova.compute.manager [req-702223dd-c710-4a15-a401-521fc1b607b0 req-3f26dc78-a92f-4787-9c77-bd2c8741d23e service nova] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Received unexpected event network-vif-plugged-d7087f7e-68a8-4f14-b9a5-db8db1732dc9 for instance with vm_state building and task_state spawning. [ 948.036101] env[62383]: DEBUG nova.network.neutron [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Successfully updated port: d7087f7e-68a8-4f14-b9a5-db8db1732dc9 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.079404] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0c2c504e-a29a-4f4c-a06c-1a902dae4617 tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.836s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.093211] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b6c66a72-32bd-4207-ac73-c2eebcc8a7d8 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "23d24da6-c7d8-4d6a-8442-a1066505aab1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.442s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.404268] env[62383]: DEBUG oslo_vmware.api [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2451997, 'name': PowerOffVM_Task, 'duration_secs': 0.193264} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.404575] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.404779] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.405100] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6892a2e8-953f-4d33-989c-184668e2809f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.438534] env[62383]: DEBUG nova.compute.manager [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 948.438819] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.439957] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99dd9fc-0cd7-4eed-b911-3404936a88ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.447584] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.447888] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80912a95-9829-438a-a5ec-7539ce4d3f3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.453991] env[62383]: DEBUG oslo_vmware.api [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 948.453991] env[62383]: value = "task-2451999" [ 948.453991] env[62383]: _type = "Task" [ 948.453991] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.462794] env[62383]: DEBUG oslo_vmware.api [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451999, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.471013] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.471267] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.471461] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Deleting the datastore file [datastore2] f193af26-eba8-471f-a00e-0afa9b190d0b {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.472066] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f061bb5f-ac52-4d63-80a1-02c9386239c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.477778] env[62383]: DEBUG oslo_vmware.api [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for the task: (returnval){ [ 948.477778] env[62383]: value = "task-2452000" [ 948.477778] env[62383]: _type = "Task" [ 948.477778] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.486082] env[62383]: DEBUG oslo_vmware.api [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2452000, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.538038] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "refresh_cache-4136466e-d9c6-448a-b392-415bb7c44a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.538287] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "refresh_cache-4136466e-d9c6-448a-b392-415bb7c44a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.538494] env[62383]: DEBUG nova.network.neutron [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 948.626458] env[62383]: DEBUG nova.compute.manager [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 948.627466] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccdbdf4-40c3-42f6-be43-26b20d8f3ac6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.954579] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1643f212-5a77-4098-9835-71609ff533df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.971088] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3b1893-8549-4ad2-a59c-d167172dbc0c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.974255] env[62383]: DEBUG oslo_vmware.api [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2451999, 'name': PowerOffVM_Task, 'duration_secs': 0.283226} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.974536] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.974702] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.975289] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c908afc-8e46-49ae-a10a-892dfbde69db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.005885] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f023190b-0929-40cc-8fe4-ea25a50869c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.013806] env[62383]: DEBUG oslo_vmware.api [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Task: {'id': task-2452000, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167183} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.015986] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.016199] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.016380] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.016622] env[62383]: INFO nova.compute.manager [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 949.016852] env[62383]: DEBUG oslo.service.loopingcall [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.017145] env[62383]: DEBUG nova.compute.manager [-] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 949.017245] env[62383]: DEBUG nova.network.neutron [-] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 949.019842] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59c0d15-67a3-4b1c-bd21-9828c985fcf7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.033056] env[62383]: DEBUG nova.compute.provider_tree [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.058098] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 949.058388] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 949.058621] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleting the datastore file [datastore1] 1ab60ef9-4209-4097-8a2c-a55e3a6684b2 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.058945] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5966338-ead8-4c23-a4cd-0949befd5323 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.065252] env[62383]: DEBUG oslo_vmware.api [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 949.065252] env[62383]: value = "task-2452002" [ 949.065252] env[62383]: _type = "Task" [ 949.065252] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.073677] env[62383]: DEBUG oslo_vmware.api [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452002, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.084250] env[62383]: DEBUG nova.network.neutron [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.141657] env[62383]: INFO nova.compute.manager [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] instance snapshotting [ 949.146616] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd748e3-0678-4720-87ab-44c0d6f7af28 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.165846] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2afdb9f8-87cd-4405-82c9-266569e2e7e0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.298863] env[62383]: DEBUG nova.network.neutron [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Updating instance_info_cache with network_info: [{"id": "d7087f7e-68a8-4f14-b9a5-db8db1732dc9", "address": "fa:16:3e:f3:b2:5e", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7087f7e-68", "ovs_interfaceid": "d7087f7e-68a8-4f14-b9a5-db8db1732dc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.536676] env[62383]: DEBUG nova.scheduler.client.report [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.575387] env[62383]: DEBUG oslo_vmware.api [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452002, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13605} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.575645] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.575829] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.576009] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.576194] env[62383]: INFO nova.compute.manager [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 949.576436] env[62383]: DEBUG oslo.service.loopingcall [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.576625] env[62383]: DEBUG nova.compute.manager [-] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 949.576718] env[62383]: DEBUG nova.network.neutron [-] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 949.677916] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 949.678265] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-cc9497ad-61ab-475d-88af-c3a9db68d364 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.686467] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 949.686467] env[62383]: value = "task-2452003" [ 949.686467] env[62383]: _type = "Task" [ 949.686467] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.694527] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452003, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.801985] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "refresh_cache-4136466e-d9c6-448a-b392-415bb7c44a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.801985] env[62383]: DEBUG nova.compute.manager [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Instance network_info: |[{"id": "d7087f7e-68a8-4f14-b9a5-db8db1732dc9", "address": "fa:16:3e:f3:b2:5e", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7087f7e-68", "ovs_interfaceid": "d7087f7e-68a8-4f14-b9a5-db8db1732dc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 949.802637] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:b2:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd7087f7e-68a8-4f14-b9a5-db8db1732dc9', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.810945] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Creating folder: Project (c15955328966463fa09401a270d95fe0). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.811262] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c10599b9-50cb-4bf0-8164-8ec34d16b512 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.822892] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Created folder: Project (c15955328966463fa09401a270d95fe0) in parent group-v496304. [ 949.823101] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Creating folder: Instances. Parent ref: group-v496534. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 949.823340] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a0d47b64-aebb-4abf-822d-1ee8aebf8a68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.833256] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Created folder: Instances in parent group-v496534. [ 949.833735] env[62383]: DEBUG oslo.service.loopingcall [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.833808] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.834063] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-894d1449-2910-4144-adf4-ef1e00731505 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.854872] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.854872] env[62383]: value = "task-2452006" [ 949.854872] env[62383]: _type = "Task" [ 949.854872] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.864174] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452006, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.891510] env[62383]: DEBUG nova.network.neutron [-] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.979585] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.979975] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.994446] env[62383]: DEBUG nova.compute.manager [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Received event network-changed-d7087f7e-68a8-4f14-b9a5-db8db1732dc9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 949.994705] env[62383]: DEBUG nova.compute.manager [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Refreshing instance network info cache due to event network-changed-d7087f7e-68a8-4f14-b9a5-db8db1732dc9. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 949.994923] env[62383]: DEBUG oslo_concurrency.lockutils [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] Acquiring lock "refresh_cache-4136466e-d9c6-448a-b392-415bb7c44a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.995193] env[62383]: DEBUG oslo_concurrency.lockutils [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] Acquired lock "refresh_cache-4136466e-d9c6-448a-b392-415bb7c44a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.995413] env[62383]: DEBUG nova.network.neutron [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Refreshing network info cache for port d7087f7e-68a8-4f14-b9a5-db8db1732dc9 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 950.041512] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.042054] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 950.044610] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.314s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.046246] env[62383]: INFO nova.compute.claims [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.195992] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452003, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.328557] env[62383]: DEBUG nova.network.neutron [-] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.365728] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452006, 'name': CreateVM_Task, 'duration_secs': 0.348768} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.365945] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 950.366629] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.366866] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.367139] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 950.367453] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbeb271a-c4d9-434e-a220-f2595c280491 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.373857] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 950.373857] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525ac642-675f-5202-54b6-fe273fbb423d" [ 950.373857] env[62383]: _type = "Task" [ 950.373857] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.381875] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525ac642-675f-5202-54b6-fe273fbb423d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.394779] env[62383]: INFO nova.compute.manager [-] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Took 1.38 seconds to deallocate network for instance. [ 950.484056] env[62383]: DEBUG nova.compute.manager [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 950.552584] env[62383]: DEBUG nova.compute.utils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 950.555142] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 950.555320] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 950.621397] env[62383]: DEBUG nova.policy [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eeedfa485774ec39dd7aba217199d6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd09227ae56ba4875954d0107ae5cf5f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 950.701057] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452003, 'name': CreateSnapshot_Task, 'duration_secs': 0.636072} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.701417] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 950.702541] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2369fc68-ce93-46a3-8a89-eb7fe7620823 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.769964] env[62383]: DEBUG nova.network.neutron [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Updated VIF entry in instance network info cache for port d7087f7e-68a8-4f14-b9a5-db8db1732dc9. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.770404] env[62383]: DEBUG nova.network.neutron [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Updating instance_info_cache with network_info: [{"id": "d7087f7e-68a8-4f14-b9a5-db8db1732dc9", "address": "fa:16:3e:f3:b2:5e", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7087f7e-68", "ovs_interfaceid": "d7087f7e-68a8-4f14-b9a5-db8db1732dc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.831552] env[62383]: INFO nova.compute.manager [-] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Took 1.25 seconds to deallocate network for instance. [ 950.886008] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525ac642-675f-5202-54b6-fe273fbb423d, 'name': SearchDatastore_Task, 'duration_secs': 0.010181} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.886333] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.886666] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 950.886795] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.887300] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.887524] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 950.887800] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcaa00c3-49b3-412c-af1a-7ade9f1743e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.899587] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 950.899685] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 950.901266] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.901503] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57e39695-a0d8-4cb7-8586-3db1a0a0ef57 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.910351] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 950.910351] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52784c20-8334-b27f-3b90-ba7d016cae27" [ 950.910351] env[62383]: _type = "Task" [ 950.910351] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.918450] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52784c20-8334-b27f-3b90-ba7d016cae27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.953572] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Successfully created port: 20344f03-4957-4f0a-a1bb-5493c7942654 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.007660] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.056027] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 951.227345] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 951.229922] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1887adb9-852d-4693-8e8f-0119c13fd43f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.237773] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 951.237773] env[62383]: value = "task-2452007" [ 951.237773] env[62383]: _type = "Task" [ 951.237773] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.248613] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452007, 'name': CloneVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.274545] env[62383]: DEBUG oslo_concurrency.lockutils [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] Releasing lock "refresh_cache-4136466e-d9c6-448a-b392-415bb7c44a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.274801] env[62383]: DEBUG nova.compute.manager [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Received event network-vif-deleted-8e058335-a5bd-4e58-ab6b-9d03b28c03c3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 951.274983] env[62383]: DEBUG nova.compute.manager [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Received event network-vif-deleted-d1287bdf-752f-4429-ad35-e47c6ef4804a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 951.275164] env[62383]: INFO nova.compute.manager [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Neutron deleted interface d1287bdf-752f-4429-ad35-e47c6ef4804a; detaching it from the instance and deleting it from the info cache [ 951.275332] env[62383]: DEBUG nova.network.neutron [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.342784] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.383123] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468b6b33-0bcd-4563-b54d-8c35ddba0f43 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.391620] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e910dd0-f26a-483e-ab1b-1d2394604bf0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.433929] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fcb8c0-58cf-401e-8dd8-7d6b31915722 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.444440] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52784c20-8334-b27f-3b90-ba7d016cae27, 'name': SearchDatastore_Task, 'duration_secs': 0.025894} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.448998] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d744871-73be-4436-b0fe-6b5db97cc05b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.452502] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d941fd1c-0176-4602-9068-ef3d5e783881 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.459914] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 951.459914] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b9ee09-d1c1-4e46-e8cd-2a696da4538e" [ 951.459914] env[62383]: _type = "Task" [ 951.459914] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.467996] env[62383]: DEBUG nova.compute.provider_tree [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.480484] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b9ee09-d1c1-4e46-e8cd-2a696da4538e, 'name': SearchDatastore_Task, 'duration_secs': 0.010722} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.480750] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.480998] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4136466e-d9c6-448a-b392-415bb7c44a8d/4136466e-d9c6-448a-b392-415bb7c44a8d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 951.483830] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db70dfda-f37a-4560-b6ee-65b4a2582f9d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.488498] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 951.488498] env[62383]: value = "task-2452008" [ 951.488498] env[62383]: _type = "Task" [ 951.488498] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.496867] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.752011] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452007, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.778069] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44e435c6-81da-4240-a7a4-fff78e724985 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.788445] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0c9f5a-7a99-4aff-9175-7cfc7d18f01a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.823022] env[62383]: DEBUG nova.compute.manager [req-90a32e0b-d47f-40c8-bfe2-c245b75e3505 req-6b28e5ab-70b6-49e7-9566-48f7cf275d4b service nova] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Detach interface failed, port_id=d1287bdf-752f-4429-ad35-e47c6ef4804a, reason: Instance 1ab60ef9-4209-4097-8a2c-a55e3a6684b2 could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 951.974351] env[62383]: DEBUG nova.scheduler.client.report [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.999320] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452008, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481053} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.999443] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4136466e-d9c6-448a-b392-415bb7c44a8d/4136466e-d9c6-448a-b392-415bb7c44a8d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 951.999619] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 951.999878] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d10814ee-0d5f-4ffb-afba-fb9b208d01cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.008810] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 952.008810] env[62383]: value = "task-2452009" [ 952.008810] env[62383]: _type = "Task" [ 952.008810] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.017321] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452009, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.068957] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 952.096029] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 952.096267] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.096421] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 952.096597] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.096737] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 952.096923] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 952.097179] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 952.097340] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 952.097505] env[62383]: DEBUG nova.virt.hardware [None 
req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 952.097663] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 952.098020] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 952.098715] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d29aad-409a-4a73-a3ae-d9e778652880 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.107116] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ebc8e61-e7d4-4c7a-9f98-6b560d9c8851 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.248523] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452007, 'name': CloneVM_Task} progress is 95%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.331284] env[62383]: DEBUG nova.compute.manager [req-643b78ab-35f9-40e9-8b7f-26bbc8fe26ca req-ffa9d096-6011-431e-a2d4-26192685e556 service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Received event network-vif-plugged-20344f03-4957-4f0a-a1bb-5493c7942654 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 952.331481] env[62383]: DEBUG oslo_concurrency.lockutils [req-643b78ab-35f9-40e9-8b7f-26bbc8fe26ca req-ffa9d096-6011-431e-a2d4-26192685e556 service nova] Acquiring lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.331691] env[62383]: DEBUG oslo_concurrency.lockutils [req-643b78ab-35f9-40e9-8b7f-26bbc8fe26ca req-ffa9d096-6011-431e-a2d4-26192685e556 service nova] Lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.331861] env[62383]: DEBUG oslo_concurrency.lockutils [req-643b78ab-35f9-40e9-8b7f-26bbc8fe26ca req-ffa9d096-6011-431e-a2d4-26192685e556 service nova] Lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.332339] env[62383]: DEBUG nova.compute.manager [req-643b78ab-35f9-40e9-8b7f-26bbc8fe26ca 
req-ffa9d096-6011-431e-a2d4-26192685e556 service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] No waiting events found dispatching network-vif-plugged-20344f03-4957-4f0a-a1bb-5493c7942654 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 952.332585] env[62383]: WARNING nova.compute.manager [req-643b78ab-35f9-40e9-8b7f-26bbc8fe26ca req-ffa9d096-6011-431e-a2d4-26192685e556 service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Received unexpected event network-vif-plugged-20344f03-4957-4f0a-a1bb-5493c7942654 for instance with vm_state building and task_state spawning. [ 952.426636] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Successfully updated port: 20344f03-4957-4f0a-a1bb-5493c7942654 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.479226] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.479770] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 952.483348] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.863s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.483570] env[62383]: DEBUG nova.objects.instance [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lazy-loading 'resources' on Instance uuid d0311c29-e1ed-446f-a52b-1687b9561740 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.518452] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452009, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062753} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.518698] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 952.519522] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5090c4b6-787c-4c40-aabf-49632a9f1322 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.542446] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 4136466e-d9c6-448a-b392-415bb7c44a8d/4136466e-d9c6-448a-b392-415bb7c44a8d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.542687] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b026dba0-905f-4916-972d-174bf6911053 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.563559] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 952.563559] env[62383]: value = "task-2452010" [ 952.563559] env[62383]: _type = "Task" [ 952.563559] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.571807] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452010, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.748715] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452007, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.931871] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "refresh_cache-9d2e3772-e0b2-450a-9dc8-725c4a05cde4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 952.932151] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "refresh_cache-9d2e3772-e0b2-450a-9dc8-725c4a05cde4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.932374] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.986504] env[62383]: DEBUG nova.compute.utils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 952.987861] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 952.988045] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 953.027904] env[62383]: DEBUG nova.policy [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7eeedfa485774ec39dd7aba217199d6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd09227ae56ba4875954d0107ae5cf5f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 953.075686] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452010, 'name': ReconfigVM_Task, 'duration_secs': 0.293094} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.078011] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 4136466e-d9c6-448a-b392-415bb7c44a8d/4136466e-d9c6-448a-b392-415bb7c44a8d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.078011] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69b55db1-90a1-47a4-952f-40559c8fe353 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.087441] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 953.087441] env[62383]: value = "task-2452011" [ 953.087441] env[62383]: _type = "Task" [ 953.087441] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.095637] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452011, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.250258] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452007, 'name': CloneVM_Task, 'duration_secs': 1.510821} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.253136] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Created linked-clone VM from snapshot [ 953.254469] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328d23ed-3c8c-49ae-a126-1d33e92cd293 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.262030] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Uploading image 4325a252-7a3e-46c9-a3f8-4a42d9178d06 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 953.275918] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 953.276239] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b27c66cc-3f9c-4b35-a1a8-901e7707dbbf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.284805] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 953.284805] env[62383]: value = "task-2452012" [ 953.284805] env[62383]: _type = "Task" [ 953.284805] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.288317] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Successfully created port: dac828f5-7985-4dd4-9f06-63931ede1877 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.296728] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452012, 'name': Destroy_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.359123] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02918c07-0b0c-47c5-a2e0-578500c9b932 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.366400] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8bf34d8-95ac-4293-ae03-180ae4a69c16 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.397983] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527df872-b9a5-40e4-bc7b-90e8b04a165f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.405671] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc04eeb-1eec-402b-bd8e-45658f34f825 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.421317] env[62383]: DEBUG nova.compute.provider_tree [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.485017] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.493853] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 953.597186] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452011, 'name': Rename_Task, 'duration_secs': 0.159148} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.597693] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 953.597951] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb6371f9-fd46-4216-9cbd-cdf4a063f1bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.603352] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 953.603352] env[62383]: value = "task-2452013" [ 953.603352] env[62383]: _type = "Task" [ 953.603352] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.613120] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452013, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.702205] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Updating instance_info_cache with network_info: [{"id": "20344f03-4957-4f0a-a1bb-5493c7942654", "address": "fa:16:3e:d1:5e:c5", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20344f03-49", "ovs_interfaceid": "20344f03-4957-4f0a-a1bb-5493c7942654", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.794592] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452012, 'name': Destroy_Task, 'duration_secs': 0.304096} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.794858] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Destroyed the VM [ 953.795109] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 953.795370] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-05c98eab-e72d-48fe-b74c-f3aacab9d2d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.801488] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 953.801488] env[62383]: value = "task-2452014" [ 953.801488] env[62383]: _type = "Task" [ 953.801488] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.809304] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452014, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.925179] env[62383]: DEBUG nova.scheduler.client.report [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 954.115315] env[62383]: DEBUG oslo_vmware.api [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452013, 'name': PowerOnVM_Task, 'duration_secs': 0.499289} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.115570] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 954.115778] env[62383]: INFO nova.compute.manager [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Took 6.47 seconds to spawn the instance on the hypervisor. [ 954.115962] env[62383]: DEBUG nova.compute.manager [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 954.116759] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bd769b-8987-44de-b083-6e2039cca1b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.205224] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "refresh_cache-9d2e3772-e0b2-450a-9dc8-725c4a05cde4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.205224] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Instance network_info: |[{"id": "20344f03-4957-4f0a-a1bb-5493c7942654", "address": "fa:16:3e:d1:5e:c5", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20344f03-49", "ovs_interfaceid": "20344f03-4957-4f0a-a1bb-5493c7942654", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 954.205673] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Instance VIF 
info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:5e:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20344f03-4957-4f0a-a1bb-5493c7942654', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 954.215874] env[62383]: DEBUG oslo.service.loopingcall [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.216140] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 954.216413] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7206a71-1977-417a-9a90-bfc5a355c8d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.237853] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 954.237853] env[62383]: value = "task-2452015" [ 954.237853] env[62383]: _type = "Task" [ 954.237853] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.246541] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452015, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.312284] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452014, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.360159] env[62383]: DEBUG nova.compute.manager [req-e54a9bfb-4496-4e33-9a83-bad2292237c7 req-57bfdd63-7a5b-4012-99af-c77c3cb4b2a9 service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Received event network-changed-20344f03-4957-4f0a-a1bb-5493c7942654 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 954.360159] env[62383]: DEBUG nova.compute.manager [req-e54a9bfb-4496-4e33-9a83-bad2292237c7 req-57bfdd63-7a5b-4012-99af-c77c3cb4b2a9 service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Refreshing instance network info cache due to event network-changed-20344f03-4957-4f0a-a1bb-5493c7942654. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 954.360159] env[62383]: DEBUG oslo_concurrency.lockutils [req-e54a9bfb-4496-4e33-9a83-bad2292237c7 req-57bfdd63-7a5b-4012-99af-c77c3cb4b2a9 service nova] Acquiring lock "refresh_cache-9d2e3772-e0b2-450a-9dc8-725c4a05cde4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.360159] env[62383]: DEBUG oslo_concurrency.lockutils [req-e54a9bfb-4496-4e33-9a83-bad2292237c7 req-57bfdd63-7a5b-4012-99af-c77c3cb4b2a9 service nova] Acquired lock "refresh_cache-9d2e3772-e0b2-450a-9dc8-725c4a05cde4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.360492] env[62383]: DEBUG nova.network.neutron [req-e54a9bfb-4496-4e33-9a83-bad2292237c7 req-57bfdd63-7a5b-4012-99af-c77c3cb4b2a9 service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Refreshing network info cache for port 20344f03-4957-4f0a-a1bb-5493c7942654 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.430660] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.947s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.433441] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 24.099s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.455869] env[62383]: INFO nova.scheduler.client.report [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Deleted allocations for instance d0311c29-e1ed-446f-a52b-1687b9561740 [ 954.503431] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 954.527875] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 954.528066] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.528230] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 954.528422] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.528572] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 954.528782] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 954.528955] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 954.529195] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 954.529319] env[62383]: DEBUG nova.virt.hardware [None 
req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 954.529486] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 954.529660] env[62383]: DEBUG nova.virt.hardware [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 954.530544] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcea188c-aa3b-4e71-a821-a4c3f932818f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.538546] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228f5c7e-3254-4b3c-b002-5a3d0f2fa3dd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.632909] env[62383]: INFO nova.compute.manager [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Took 27.70 seconds to build instance. [ 954.748984] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452015, 'name': CreateVM_Task, 'duration_secs': 0.452896} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.749191] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.750701] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.750873] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.751238] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 954.751469] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c87be444-7c3b-4423-b1c1-784ff4c6ae53 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.756034] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 954.756034] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52eaf6a3-d676-622f-3606-79a7352221e1" [ 954.756034] env[62383]: _type = "Task" [ 954.756034] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.763384] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52eaf6a3-d676-622f-3606-79a7352221e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.811968] env[62383]: DEBUG oslo_vmware.api [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452014, 'name': RemoveSnapshot_Task, 'duration_secs': 0.666382} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.812072] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 954.905750] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Successfully updated port: dac828f5-7985-4dd4-9f06-63931ede1877 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 954.963670] env[62383]: DEBUG oslo_concurrency.lockutils [None req-b1309ca8-10e6-4ede-aaeb-da62ddbfa4f7 tempest-ServersTestBootFromVolume-366645730 tempest-ServersTestBootFromVolume-366645730-project-member] Lock "d0311c29-e1ed-446f-a52b-1687b9561740" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.634s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.093267] env[62383]: DEBUG nova.network.neutron [req-e54a9bfb-4496-4e33-9a83-bad2292237c7 req-57bfdd63-7a5b-4012-99af-c77c3cb4b2a9 service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Updated VIF entry in instance network info cache for port 20344f03-4957-4f0a-a1bb-5493c7942654. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 955.093636] env[62383]: DEBUG nova.network.neutron [req-e54a9bfb-4496-4e33-9a83-bad2292237c7 req-57bfdd63-7a5b-4012-99af-c77c3cb4b2a9 service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Updating instance_info_cache with network_info: [{"id": "20344f03-4957-4f0a-a1bb-5493c7942654", "address": "fa:16:3e:d1:5e:c5", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20344f03-49", "ovs_interfaceid": "20344f03-4957-4f0a-a1bb-5493c7942654", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.135453] env[62383]: DEBUG oslo_concurrency.lockutils [None req-304ed387-501b-41cc-8d4c-a4b74f4798b8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4136466e-d9c6-448a-b392-415bb7c44a8d" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.220s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.266638] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52eaf6a3-d676-622f-3606-79a7352221e1, 'name': SearchDatastore_Task, 'duration_secs': 0.010226} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.266923] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.267168] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.267413] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.267801] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.267801] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.267986] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-650c34d4-a3fa-4797-9ded-c018c973ed3f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.276027] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.276201] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Folder 
[datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.276885] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1b0812e-8045-4ef2-9e24-4c5a57e317f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.281844] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 955.281844] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b6da19-e886-a3e7-300c-5be22db42ea5" [ 955.281844] env[62383]: _type = "Task" [ 955.281844] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.289536] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b6da19-e886-a3e7-300c-5be22db42ea5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.317046] env[62383]: WARNING nova.compute.manager [None req-eed45bfe-f7d9-4e83-9872-a2db0c0e553c tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Image not found during snapshot: nova.exception.ImageNotFound: Image 4325a252-7a3e-46c9-a3f8-4a42d9178d06 could not be found. [ 955.409900] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "refresh_cache-362da311-fa2b-435d-b972-155a3ac22cbb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.410179] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "refresh_cache-362da311-fa2b-435d-b972-155a3ac22cbb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.410405] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 955.463791] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "4136466e-d9c6-448a-b392-415bb7c44a8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.464036] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock 
"4136466e-d9c6-448a-b392-415bb7c44a8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.464317] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "4136466e-d9c6-448a-b392-415bb7c44a8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.464516] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4136466e-d9c6-448a-b392-415bb7c44a8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.464688] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4136466e-d9c6-448a-b392-415bb7c44a8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.466994] env[62383]: INFO nova.compute.manager [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Terminating instance [ 955.470071] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2c93bdf1-aaf4-4e40-898a-634dc00d05e6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.470228] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 152567ba-f24c-4674-b06e-98c76a3da324 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.470356] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance c2fee51e-3cc9-421c-bfe5-b324a5b14197 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.470476] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 67f05a2b-f323-4e4a-ac13-7f4745593be0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.470592] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8bd05dac-7aa2-44c5-8752-6045c01d213d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.470731] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1e367665-1d4b-4686-ac79-c946423c1762 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 955.470848] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 4cd9c7be-c5f4-460b-a9e2-e8f778076947 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.470953] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2f028680-8db4-474a-8f24-880c4702877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.471082] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1b025655-acad-4b70-9e1a-489683cafb7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.471203] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance eedc7859-3882-4837-9419-f9edce5f12fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.471395] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance c56464dd-63af-4686-b666-d0ac2df01ec1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.471547] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 81921762-ac51-42d2-83dc-d5b6e904fbb7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
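
The resource-tracker entries in this stretch of the log ([ 955.470071] onward) show _remove_deleted_instances_allocations walking every allocation that placement holds against this compute node and sorting it into three outcomes: instances actively managed here keep their allocations, instances that are merely scheduled here are skipped until they start, and consumers the host does not recognize trigger the WARNING "Skipping heal of allocation because we do not know what to do." The following is a minimal, self-contained sketch of that classification logic only; it is an illustration, not Nova's implementation, and the names classify_allocations, tracked, and scheduled are assumptions introduced here. The example UUIDs are taken from the surrounding log entries.

from enum import Enum

class Decision(Enum):
    KEEP = "actively managed; keep allocation"
    SKIP_SCHEDULED = "scheduled but not yet started; skip heal"
    SKIP_UNKNOWN = "not managed here; skip heal (unknown consumer)"

def classify_allocations(allocations, tracked, scheduled):
    """Illustrative sketch only: mirror the three outcomes seen in the log.

    allocations: dict mapping consumer (instance) UUID -> resources dict,
                 e.g. {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}
    tracked:     set of instance UUIDs actively managed on this host
    scheduled:   set of instance UUIDs allocated here but not yet started
    """
    decisions = {}
    for uuid in allocations:
        if uuid in tracked:
            decisions[uuid] = Decision.KEEP
        elif uuid in scheduled:
            decisions[uuid] = Decision.SKIP_SCHEDULED
        else:
            # Corresponds to the WARNING lines: the allocation references
            # this host, but the instance is not actively managed by it.
            decisions[uuid] = Decision.SKIP_UNKNOWN
    return decisions

if __name__ == "__main__":
    # UUIDs and resource shapes copied from the log entries above/below.
    allocations = {
        "161d6537-fe78-4a42-b8a5-e3d7d78c0154": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
        "1e367665-1d4b-4686-ac79-c946423c1762": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
        "4e5bd3ee-605f-4770-b658-9cbc3d0010ab": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
    }
    tracked = {"161d6537-fe78-4a42-b8a5-e3d7d78c0154"}
    scheduled = {"4e5bd3ee-605f-4770-b658-9cbc3d0010ab"}
    for uuid, decision in classify_allocations(allocations, tracked, scheduled).items():
        print(uuid, "->", decision.value)

Run directly, the sketch prints one line per consumer, matching the DEBUG/WARNING split the resource tracker logs here.
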
[ 955.471675] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance c117e858-696f-43dc-9182-70380214737f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 955.471791] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance eb632e2d-b71e-446d-83a2-0bab1d823d27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.472353] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance df5e6f1b-ac01-4ac0-bc84-b49c54c3e771 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 955.472438] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance f193af26-eba8-471f-a00e-0afa9b190d0b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 955.472571] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1ab60ef9-4209-4097-8a2c-a55e3a6684b2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 955.472803] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 161d6537-fe78-4a42-b8a5-e3d7d78c0154 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.472803] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 4136466e-d9c6-448a-b392-415bb7c44a8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.472973] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 9d2e3772-e0b2-450a-9dc8-725c4a05cde4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.472973] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 362da311-fa2b-435d-b972-155a3ac22cbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 955.596355] env[62383]: DEBUG oslo_concurrency.lockutils [req-e54a9bfb-4496-4e33-9a83-bad2292237c7 req-57bfdd63-7a5b-4012-99af-c77c3cb4b2a9 service nova] Releasing lock "refresh_cache-9d2e3772-e0b2-450a-9dc8-725c4a05cde4" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.796132] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b6da19-e886-a3e7-300c-5be22db42ea5, 'name': SearchDatastore_Task, 'duration_secs': 0.011343} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.796939] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8f356e9-d512-49db-a2fc-0eb63e9f7738 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.802039] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 955.802039] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f9b81d-a8bc-3f04-4818-78141f9e50c8" [ 955.802039] env[62383]: _type = "Task" [ 955.802039] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.809862] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f9b81d-a8bc-3f04-4818-78141f9e50c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.957186] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.974988] env[62383]: DEBUG nova.compute.manager [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 955.975221] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 955.975919] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 4e5bd3ee-605f-4770-b658-9cbc3d0010ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 955.977676] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe53776-5f7a-438f-bd9f-4c443143853c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.999531] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 955.999786] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f560eb4-1bec-469f-b5b4-88a45074404b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.010210] env[62383]: DEBUG oslo_vmware.api [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 956.010210] env[62383]: value = "task-2452016" [ 956.010210] env[62383]: _type = "Task" [ 956.010210] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.021917] env[62383]: DEBUG oslo_vmware.api [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452016, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.231640] env[62383]: DEBUG nova.network.neutron [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Updating instance_info_cache with network_info: [{"id": "dac828f5-7985-4dd4-9f06-63931ede1877", "address": "fa:16:3e:76:b4:00", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdac828f5-79", "ovs_interfaceid": "dac828f5-7985-4dd4-9f06-63931ede1877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.316357] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f9b81d-a8bc-3f04-4818-78141f9e50c8, 'name': SearchDatastore_Task, 'duration_secs': 0.009799} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.316357] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.316357] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9d2e3772-e0b2-450a-9dc8-725c4a05cde4/9d2e3772-e0b2-450a-9dc8-725c4a05cde4.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.316357] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-860fbf47-2c82-43f4-8394-d8863e35ac63 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.322370] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 956.322370] env[62383]: value = "task-2452017" [ 956.322370] env[62383]: _type = "Task" [ 956.322370] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.332059] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452017, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.423026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.423026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.423026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.423026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.423026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.424291] env[62383]: INFO nova.compute.manager [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Terminating instance [ 956.495461] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 956.525634] env[62383]: DEBUG oslo_vmware.api [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452016, 'name': PowerOffVM_Task, 'duration_secs': 0.359721} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.526264] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.526653] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 956.527063] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45e35348-86c0-4431-b2ab-ef41f8df5628 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.606024] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 956.606024] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 956.606024] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleting the datastore file [datastore2] 4136466e-d9c6-448a-b392-415bb7c44a8d {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.606024] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae039258-ecf0-41f0-94f3-050033c223da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.613176] env[62383]: DEBUG nova.compute.manager [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Received event network-vif-plugged-dac828f5-7985-4dd4-9f06-63931ede1877 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 956.613176] env[62383]: DEBUG oslo_concurrency.lockutils [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] Acquiring lock "362da311-fa2b-435d-b972-155a3ac22cbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.613176] env[62383]: DEBUG oslo_concurrency.lockutils [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] Lock "362da311-fa2b-435d-b972-155a3ac22cbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.613176] env[62383]: DEBUG oslo_concurrency.lockutils [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] Lock "362da311-fa2b-435d-b972-155a3ac22cbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.613477] env[62383]: DEBUG nova.compute.manager [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] No waiting events found dispatching network-vif-plugged-dac828f5-7985-4dd4-9f06-63931ede1877 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 956.613776] env[62383]: WARNING nova.compute.manager [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Received unexpected event network-vif-plugged-dac828f5-7985-4dd4-9f06-63931ede1877 for instance with vm_state building and task_state spawning. [ 956.614067] env[62383]: DEBUG nova.compute.manager [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Received event network-changed-dac828f5-7985-4dd4-9f06-63931ede1877 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 956.614347] env[62383]: DEBUG nova.compute.manager [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Refreshing instance network info cache due to event network-changed-dac828f5-7985-4dd4-9f06-63931ede1877. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 956.614636] env[62383]: DEBUG oslo_concurrency.lockutils [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] Acquiring lock "refresh_cache-362da311-fa2b-435d-b972-155a3ac22cbb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.616686] env[62383]: DEBUG oslo_vmware.api [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 956.616686] env[62383]: value = "task-2452019" [ 956.616686] env[62383]: _type = "Task" [ 956.616686] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.626904] env[62383]: DEBUG oslo_vmware.api [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452019, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.735417] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "refresh_cache-362da311-fa2b-435d-b972-155a3ac22cbb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.736823] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Instance network_info: |[{"id": "dac828f5-7985-4dd4-9f06-63931ede1877", "address": "fa:16:3e:76:b4:00", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdac828f5-79", "ovs_interfaceid": "dac828f5-7985-4dd4-9f06-63931ede1877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 956.736823] env[62383]: DEBUG oslo_concurrency.lockutils [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] Acquired lock "refresh_cache-362da311-fa2b-435d-b972-155a3ac22cbb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.736823] env[62383]: DEBUG nova.network.neutron [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Refreshing network info cache for port dac828f5-7985-4dd4-9f06-63931ede1877 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.737845] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:b4:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d88bb07-f93c-45ca-bce7-230cb1f33833', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dac828f5-7985-4dd4-9f06-63931ede1877', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.747666] env[62383]: DEBUG oslo.service.loopingcall [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 
tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 956.748940] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.749257] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5765ba4-35cf-4baf-a570-f81999bc7dc1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.772510] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.772510] env[62383]: value = "task-2452020" [ 956.772510] env[62383]: _type = "Task" [ 956.772510] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.782154] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452020, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.832130] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452017, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.930570] env[62383]: DEBUG nova.compute.manager [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 956.930829] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 956.931892] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7a67e1-c5bc-4f42-a718-b655650e6d02 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.939631] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 956.940065] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2aeb8547-e270-41ab-b3e6-32f9cf629949 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.947068] env[62383]: DEBUG oslo_vmware.api [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 956.947068] env[62383]: value = "task-2452021" [ 956.947068] env[62383]: _type = "Task" [ 956.947068] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.957825] env[62383]: DEBUG oslo_vmware.api [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.998987] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8994780e-1b8f-4464-a303-a1e68206e770 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 957.126819] env[62383]: DEBUG oslo_vmware.api [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452019, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.375238} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.127102] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.127305] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.127522] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.127706] env[62383]: INFO nova.compute.manager [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 957.127948] env[62383]: DEBUG oslo.service.loopingcall [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 957.128156] env[62383]: DEBUG nova.compute.manager [-] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 957.128249] env[62383]: DEBUG nova.network.neutron [-] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 957.282075] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452020, 'name': CreateVM_Task, 'duration_secs': 0.474294} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.282352] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 957.282931] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.283114] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.283429] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 957.283682] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7be4d324-fb4d-4721-8fcc-de7cb73875de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.288011] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 957.288011] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52745f46-2a1e-1c93-01c1-c6d279f614dc" [ 957.288011] env[62383]: _type = "Task" [ 957.288011] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.295931] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52745f46-2a1e-1c93-01c1-c6d279f614dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.332411] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452017, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52018} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.332670] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9d2e3772-e0b2-450a-9dc8-725c4a05cde4/9d2e3772-e0b2-450a-9dc8-725c4a05cde4.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.332888] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.333149] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3fd9a339-6840-4628-98b6-fab2b644cf85 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.339043] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 957.339043] env[62383]: value = "task-2452022" [ 957.339043] env[62383]: _type = "Task" [ 957.339043] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.351712] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452022, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.457788] env[62383]: DEBUG oslo_vmware.api [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452021, 'name': PowerOffVM_Task, 'duration_secs': 0.26029} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.457999] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.458209] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 957.458653] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94263582-c2f3-470a-840b-7dea57867052 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.504533] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 9f8e346e-815c-492d-84a9-00ebdca3bcc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 957.504533] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 957.504533] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3392MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 957.523113] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 957.523113] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 957.523113] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleting the datastore file [datastore1] 161d6537-fe78-4a42-b8a5-e3d7d78c0154 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.523113] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34d4faaa-4365-49d4-b854-4b609b3e0d24 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
957.530013] env[62383]: DEBUG oslo_vmware.api [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for the task: (returnval){ [ 957.530013] env[62383]: value = "task-2452024" [ 957.530013] env[62383]: _type = "Task" [ 957.530013] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.556863] env[62383]: DEBUG oslo_vmware.api [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452024, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.590142] env[62383]: DEBUG nova.network.neutron [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Updated VIF entry in instance network info cache for port dac828f5-7985-4dd4-9f06-63931ede1877. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 957.590347] env[62383]: DEBUG nova.network.neutron [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Updating instance_info_cache with network_info: [{"id": "dac828f5-7985-4dd4-9f06-63931ede1877", "address": "fa:16:3e:76:b4:00", "network": {"id": "b5041fbf-4c8c-40c2-990e-6fb61ba9c7d8", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-832665113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d09227ae56ba4875954d0107ae5cf5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d88bb07-f93c-45ca-bce7-230cb1f33833", "external-id": "nsx-vlan-transportzone-387", "segmentation_id": 387, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdac828f5-79", "ovs_interfaceid": "dac828f5-7985-4dd4-9f06-63931ede1877", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.801431] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52745f46-2a1e-1c93-01c1-c6d279f614dc, 'name': SearchDatastore_Task, 'duration_secs': 0.010008} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.803706] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.803706] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.804277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.804277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.804368] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.804823] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e97104bc-704b-4e0c-9172-a63ed13e843b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.813529] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.813693] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.814474] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ed7240c-94e3-43e7-9904-f863f8126945 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.822628] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 957.822628] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52fbb237-ac24-2a0f-433a-704a570f5953" [ 957.822628] env[62383]: _type = "Task" [ 957.822628] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.829992] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fbb237-ac24-2a0f-433a-704a570f5953, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.849634] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452022, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10674} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.849913] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 957.850786] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934efcf9-fc22-4d08-8c61-e6a2cfa0dae5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.874406] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 9d2e3772-e0b2-450a-9dc8-725c4a05cde4/9d2e3772-e0b2-450a-9dc8-725c4a05cde4.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.877370] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6fd3f38-d68c-4b5b-a76a-a44d56880a6b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.896278] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 957.896278] env[62383]: value = "task-2452025" [ 957.896278] env[62383]: _type = "Task" [ 957.896278] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.904105] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452025, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.928457] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479d3dbe-c136-4cca-9360-6e3c131a907f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.935490] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96642e47-d85f-4f5a-9c4a-837f611562b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.964881] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344d3221-e82f-4715-b74b-eacbee3d1705 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.972745] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e9f9a1-0901-4e44-ab4b-ced11e366837 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.984965] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.038766] env[62383]: DEBUG oslo_vmware.api [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Task: {'id': task-2452024, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149941} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.039108] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.039301] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 958.039506] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 958.039687] env[62383]: INFO nova.compute.manager [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Took 1.11 seconds to destroy the instance on the hypervisor. 
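The CopyVirtualDisk/ExtendVirtualDisk/PowerOffVM entries above all follow the same oslo.vmware task pattern: invoke a vSphere *_Task method, get back a task reference, then poll it until it completes. The following is a hedged sketch of that pattern, not code taken from this log or from Nova; the host, credentials, datastore path and size are placeholder values.

# Hedged sketch (assumed placeholder values): the oslo.vmware call pattern
# behind the "Waiting for the task", "progress is N%" and "completed
# successfully" entries above.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test', 'svc-user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Invoking a vSphere *_Task method returns a task reference immediately.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
    name='[datastore2] example/example.vmdk',
    newCapacityKb=1048576, eagerZero=False)

# wait_for_task() polls the server-side task state (the _poll_task debug
# lines) and returns the task info on success, raising an oslo.vmware
# exception if the task fails.
task_info = session.wait_for_task(task)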
[ 958.039927] env[62383]: DEBUG oslo.service.loopingcall [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.040154] env[62383]: DEBUG nova.compute.manager [-] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 958.040256] env[62383]: DEBUG nova.network.neutron [-] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 958.055400] env[62383]: DEBUG nova.network.neutron [-] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.096200] env[62383]: DEBUG oslo_concurrency.lockutils [req-556e327d-2ea1-401d-891c-2b56336d2a0b req-3e0ce122-d871-4d43-ae1b-586872952be4 service nova] Releasing lock "refresh_cache-362da311-fa2b-435d-b972-155a3ac22cbb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.335917] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fbb237-ac24-2a0f-433a-704a570f5953, 'name': SearchDatastore_Task, 'duration_secs': 0.009505} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.336702] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c88ff22-8f76-498a-af63-bb580cd85d09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.341851] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 958.341851] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52fa526e-e7a8-562b-db9b-60445259fab9" [ 958.341851] env[62383]: _type = "Task" [ 958.341851] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.349302] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fa526e-e7a8-562b-db9b-60445259fab9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.406247] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452025, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.490853] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 958.560170] env[62383]: INFO nova.compute.manager [-] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Took 1.43 seconds to deallocate network for instance. [ 958.659109] env[62383]: DEBUG nova.compute.manager [req-e8ae4201-89d3-46a9-8cb3-b59e7ed70fb5 req-7b6a2a24-cdcc-47ed-ac1f-bc0043783d32 service nova] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Received event network-vif-deleted-d7087f7e-68a8-4f14-b9a5-db8db1732dc9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 958.659109] env[62383]: DEBUG nova.compute.manager [req-e8ae4201-89d3-46a9-8cb3-b59e7ed70fb5 req-7b6a2a24-cdcc-47ed-ac1f-bc0043783d32 service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Received event network-vif-deleted-859d075c-6248-4d10-83d3-d2985a960584 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 958.659109] env[62383]: INFO nova.compute.manager [req-e8ae4201-89d3-46a9-8cb3-b59e7ed70fb5 req-7b6a2a24-cdcc-47ed-ac1f-bc0043783d32 service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Neutron deleted interface 859d075c-6248-4d10-83d3-d2985a960584; detaching it from the instance and deleting it from the info cache [ 958.660847] env[62383]: DEBUG nova.network.neutron [req-e8ae4201-89d3-46a9-8cb3-b59e7ed70fb5 req-7b6a2a24-cdcc-47ed-ac1f-bc0043783d32 service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.766727] env[62383]: DEBUG nova.network.neutron [-] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.855428] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52fa526e-e7a8-562b-db9b-60445259fab9, 'name': SearchDatastore_Task, 'duration_secs': 0.019552} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.856533] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.856533] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 362da311-fa2b-435d-b972-155a3ac22cbb/362da311-fa2b-435d-b972-155a3ac22cbb.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 958.857036] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3610411-e1cb-4711-a53b-352b0863c9f6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.864065] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 958.864065] env[62383]: value = "task-2452026" [ 958.864065] env[62383]: _type = "Task" [ 958.864065] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.873328] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452026, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.907828] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452025, 'name': ReconfigVM_Task, 'duration_secs': 0.81625} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.908506] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 9d2e3772-e0b2-450a-9dc8-725c4a05cde4/9d2e3772-e0b2-450a-9dc8-725c4a05cde4.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.910967] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7fff121-d75c-47b5-84aa-021bde538635 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.920248] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 958.920248] env[62383]: value = "task-2452027" [ 958.920248] env[62383]: _type = "Task" [ 958.920248] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.928516] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452027, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.993851] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 958.994122] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.561s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.994395] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.114s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.995976] env[62383]: INFO nova.compute.claims [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 959.064483] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.163720] env[62383]: DEBUG oslo_vmware.service 
[-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f7bdb665-d171-4bb5-a42c-2a37e0acd967 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.173885] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003a8cc3-b34d-4310-927f-23472832a1a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.210238] env[62383]: DEBUG nova.compute.manager [req-e8ae4201-89d3-46a9-8cb3-b59e7ed70fb5 req-7b6a2a24-cdcc-47ed-ac1f-bc0043783d32 service nova] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Detach interface failed, port_id=859d075c-6248-4d10-83d3-d2985a960584, reason: Instance 161d6537-fe78-4a42-b8a5-e3d7d78c0154 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 959.268265] env[62383]: INFO nova.compute.manager [-] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Took 1.23 seconds to deallocate network for instance. [ 959.374188] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452026, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.429113] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452027, 'name': Rename_Task, 'duration_secs': 0.183249} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.429476] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.429756] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fecc8350-77c2-4565-a9c3-e6039766d7db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.435704] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 959.435704] env[62383]: value = "task-2452028" [ 959.435704] env[62383]: _type = "Task" [ 959.435704] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.443692] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452028, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.774883] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.875699] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452026, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528493} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.875963] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 362da311-fa2b-435d-b972-155a3ac22cbb/362da311-fa2b-435d-b972-155a3ac22cbb.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.876193] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.876447] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f399a815-9945-492d-9e50-12286e153a68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.883585] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 959.883585] env[62383]: value = "task-2452029" [ 959.883585] env[62383]: _type = "Task" [ 959.883585] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.892526] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452029, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.945496] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452028, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.341330] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ba5cc8-d20b-4da8-9322-6b0d140f2e2c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.348907] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4159b683-a667-478e-befa-c0d6e0bfb3d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.383298] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3795f9-f068-4f6e-a6af-a4e1fb5bfe04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.391097] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "eedc7859-3882-4837-9419-f9edce5f12fa" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.391407] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.397027] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8bcc535-266a-4305-99f5-df9694be42ec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.405500] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089917} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.406213] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 960.407106] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23a2fcd-56bb-40e5-a317-20c8467e462b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.420337] env[62383]: DEBUG nova.compute.provider_tree [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.444210] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 362da311-fa2b-435d-b972-155a3ac22cbb/362da311-fa2b-435d-b972-155a3ac22cbb.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 960.445330] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-219d3c73-e89d-489f-b2dd-9e8ab3da997d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.469739] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452028, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.471199] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 960.471199] env[62383]: value = "task-2452030" [ 960.471199] env[62383]: _type = "Task" [ 960.471199] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.479220] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452030, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.897511] env[62383]: DEBUG nova.compute.utils [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 960.924596] env[62383]: DEBUG nova.scheduler.client.report [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 960.954746] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452028, 'name': PowerOnVM_Task, 'duration_secs': 1.093737} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.955074] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 960.955142] env[62383]: INFO nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Took 8.89 seconds to spawn the instance on the hypervisor. [ 960.955917] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 960.956832] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f3e21e-bba1-47a0-9668-7a475966618c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.982040] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452030, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.401205] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.430421] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.430890] env[62383]: DEBUG nova.compute.manager [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 961.435738] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.818s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.435951] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.437899] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.504s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.439247] env[62383]: INFO nova.compute.claims [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 961.442048] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquiring lock "688b0afd-a6e1-4c3f-999d-5975371e888e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.442550] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 
tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lock "688b0afd-a6e1-4c3f-999d-5975371e888e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.478713] env[62383]: INFO nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Took 33.78 seconds to build instance. [ 961.481258] env[62383]: INFO nova.scheduler.client.report [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Deleted allocations for instance c117e858-696f-43dc-9182-70380214737f [ 961.490451] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452030, 'name': ReconfigVM_Task, 'duration_secs': 0.788657} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.491344] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 362da311-fa2b-435d-b972-155a3ac22cbb/362da311-fa2b-435d-b972-155a3ac22cbb.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 961.492097] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab7025ca-05c3-4baf-a49f-a2ae0fedbf0b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.500144] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 961.500144] env[62383]: value = "task-2452031" [ 961.500144] env[62383]: _type = "Task" [ 961.500144] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.511547] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452031, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.937840] env[62383]: DEBUG nova.compute.utils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 961.939427] env[62383]: DEBUG nova.compute.manager [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Not allocating networking since 'none' was specified. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 961.947365] env[62383]: DEBUG nova.compute.manager [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 961.981144] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.289s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.991906] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2021399e-cbb3-48b4-910f-476972ff1bb5 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "c117e858-696f-43dc-9182-70380214737f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.723s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.014301] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452031, 'name': Rename_Task, 'duration_secs': 0.151807} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.014542] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.014788] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3442853f-49c1-4bfb-88aa-d6e106e8d8e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.021084] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 962.021084] env[62383]: value = "task-2452032" [ 962.021084] env[62383]: _type = "Task" [ 962.021084] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.036883] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452032, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.441585] env[62383]: DEBUG nova.compute.manager [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 962.467188] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "eedc7859-3882-4837-9419-f9edce5f12fa" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.467188] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.467188] env[62383]: INFO nova.compute.manager [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Attaching volume a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b to /dev/sdb [ 962.481057] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.536629] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452032, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.545609] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91498cab-8576-4284-b53c-f799982a4249 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.552360] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c19b9c3-eeba-45f5-aa53-ef9d4f400980 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.572626] env[62383]: DEBUG nova.virt.block_device [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating existing volume attachment record: 668d8322-6cb0-4537-a9b2-f4802c1a87e5 {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 962.839028] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75aa54ed-8111-4423-b982-9fe428e76804 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.846385] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bc035a-e894-4e15-8b74-6f1c4daa7da8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.876037] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ceb2a4-821e-4373-b2b1-78bd47980459 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.885590] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60593fb3-63bf-4ac8-b4cf-8824c1cec5f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.895970] env[62383]: DEBUG nova.compute.provider_tree [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.034038] env[62383]: DEBUG oslo_vmware.api [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452032, 'name': PowerOnVM_Task, 'duration_secs': 0.542038} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.034393] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 963.034638] env[62383]: INFO nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Took 8.53 seconds to spawn the instance on the hypervisor. [ 963.034830] env[62383]: DEBUG nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 963.035728] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7a895e-4a8a-4fff-9389-0c60fef87d0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.399776] env[62383]: DEBUG nova.scheduler.client.report [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 963.454294] env[62383]: DEBUG nova.compute.manager [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 963.482330] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 963.482583] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 963.482741] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 963.482922] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 963.483113] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 963.483279] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 963.483518] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 963.483686] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 963.483860] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 
tempest-ServerShowV254Test-701410761-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 963.484033] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 963.484212] env[62383]: DEBUG nova.virt.hardware [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 963.485067] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2fac39-c7e9-485b-9931-42e61fed844e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.493353] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f08282c-6409-474e-bc86-6dba42644e0e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.506772] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 963.512349] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Creating folder: Project (18b7d416b10847199e2306fc47a4896e). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 963.512631] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee0d3630-acf1-4eb0-a9d3-b452f7b093c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.522817] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Created folder: Project (18b7d416b10847199e2306fc47a4896e) in parent group-v496304. [ 963.522994] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Creating folder: Instances. Parent ref: group-v496543. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 963.523218] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3d005b2-8c6f-4f4f-abf7-c5211a1bfd37 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.531670] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Created folder: Instances in parent group-v496543. 
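
Editor's note: the surrounding entries repeat one pattern over and over: a vSphere task is submitted (CreateVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, ...), then "Waiting for the task ... to complete" is logged and the task is polled ("progress is 0%" ... "progress is 99%" ... "completed successfully"). The following is only a rough, self-contained Python sketch of that polling loop for readers unfamiliar with the log lines; it is not the oslo.vmware implementation, and the names FakeTask and wait_for_task here are invented stand-ins for illustration.

    import time

    # Hypothetical stand-in for a vSphere task handle; invented for illustration only.
    class FakeTask:
        def __init__(self, name, steps=3):
            self.name = name
            self._steps = steps
            self._polls = 0

        def poll(self):
            """Return (state, progress), mimicking the 'progress is N%' lines in the log."""
            self._polls += 1
            if self._polls >= self._steps:
                return "success", 100
            return "running", int(100 * self._polls / self._steps)


    def wait_for_task(task, interval=0.5, timeout=60.0):
        """Poll a task until it finishes, mirroring the wait_for_task/_poll_task
        pattern visible in the log (progress 0% ... 99% ... completed successfully)."""
        deadline = time.monotonic() + timeout
        while True:
            state, progress = task.poll()
            print(f"Task {task.name}: state={state} progress={progress}%")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"Task {task.name} failed")
            if time.monotonic() > deadline:
                raise TimeoutError(f"Task {task.name} did not finish within {timeout}s")
            time.sleep(interval)


    if __name__ == "__main__":
        wait_for_task(FakeTask("CreateVM_Task"), interval=0.1)

The real driver additionally records each task's duration (the 'duration_secs' field seen in the completed-task entries); the sketch above only reproduces the poll-until-done control flow.
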
[ 963.531902] env[62383]: DEBUG oslo.service.loopingcall [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.532094] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 963.532285] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f7aa979-d456-4d93-b182-87bf8fdf18a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.547840] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 963.547840] env[62383]: value = "task-2452038" [ 963.547840] env[62383]: _type = "Task" [ 963.547840] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.560546] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452038, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.561032] env[62383]: INFO nova.compute.manager [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Took 35.85 seconds to build instance. [ 963.844225] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.844471] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.905207] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.905722] env[62383]: DEBUG nova.compute.manager [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 963.908273] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.782s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.908489] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.910722] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.111s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.912284] env[62383]: INFO nova.compute.claims [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 963.940801] env[62383]: INFO nova.scheduler.client.report [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Deleted allocations for instance 1e367665-1d4b-4686-ac79-c946423c1762 [ 964.060290] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452038, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.063734] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc73c0a0-b6d3-450e-b154-12d580c56c69 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "362da311-fa2b-435d-b972-155a3ac22cbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.360s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.348796] env[62383]: DEBUG nova.compute.manager [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 964.416880] env[62383]: DEBUG nova.compute.utils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 964.420355] env[62383]: DEBUG nova.compute.manager [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 964.420541] env[62383]: DEBUG nova.network.neutron [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 964.435701] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.435701] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.435701] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.435701] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.435701] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.437866] env[62383]: INFO nova.compute.manager [None 
req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Terminating instance [ 964.452159] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c3ae17a7-234c-4560-b4f8-bb35f7c0514f tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "1e367665-1d4b-4686-ac79-c946423c1762" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.248s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.493469] env[62383]: DEBUG nova.policy [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '439170ea54df40f6b15796e349882f6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd0caf09f53f24077808f2dac9db8b730', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 964.509527] env[62383]: DEBUG oslo_concurrency.lockutils [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "362da311-fa2b-435d-b972-155a3ac22cbb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.509772] env[62383]: DEBUG oslo_concurrency.lockutils [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "362da311-fa2b-435d-b972-155a3ac22cbb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.509984] env[62383]: DEBUG oslo_concurrency.lockutils [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "362da311-fa2b-435d-b972-155a3ac22cbb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.510217] env[62383]: DEBUG oslo_concurrency.lockutils [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "362da311-fa2b-435d-b972-155a3ac22cbb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.510391] env[62383]: DEBUG oslo_concurrency.lockutils [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "362da311-fa2b-435d-b972-155a3ac22cbb-events" "released" 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.512431] env[62383]: INFO nova.compute.manager [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Terminating instance [ 964.559110] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452038, 'name': CreateVM_Task, 'duration_secs': 0.525742} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.559326] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 964.559775] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.559933] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.560294] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 964.560550] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb21a99d-1eb6-4e48-905d-c7ba66dd7aae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.564976] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 964.564976] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b61a74-59fa-1dc7-7f71-ec73b912a996" [ 964.564976] env[62383]: _type = "Task" [ 964.564976] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.572550] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b61a74-59fa-1dc7-7f71-ec73b912a996, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.705245] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "b451f9ad-cda6-49a3-801e-acbf121e9552" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.706593] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "b451f9ad-cda6-49a3-801e-acbf121e9552" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.890969] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.926784] env[62383]: DEBUG nova.compute.manager [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 964.934575] env[62383]: DEBUG nova.network.neutron [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Successfully created port: 5e45a953-c424-4939-8fc4-b19fe5f54fae {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.942972] env[62383]: DEBUG nova.compute.manager [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 964.943768] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 964.944574] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23af76b8-c600-453e-b96c-ba83332dfc4a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.952986] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 964.953299] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbb04fea-fec2-46e6-8928-835488602024 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.959333] env[62383]: DEBUG oslo_vmware.api [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 964.959333] env[62383]: value = "task-2452039" [ 964.959333] env[62383]: _type = "Task" [ 964.959333] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.973225] env[62383]: DEBUG oslo_vmware.api [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452039, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.015897] env[62383]: DEBUG nova.compute.manager [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 965.016154] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.017053] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5052d284-6966-4689-9f7f-3d599802457c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.026583] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.026857] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-375e7c4b-cfc5-4db0-9ec6-236eb91dea5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.033616] env[62383]: DEBUG oslo_vmware.api [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 965.033616] env[62383]: value = "task-2452042" [ 965.033616] env[62383]: _type = "Task" [ 965.033616] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.046883] env[62383]: DEBUG oslo_vmware.api [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452042, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.076702] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b61a74-59fa-1dc7-7f71-ec73b912a996, 'name': SearchDatastore_Task, 'duration_secs': 0.013346} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.079412] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.079643] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.079870] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.080025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.080205] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.080671] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7060fb0d-5a09-40e4-80e2-e455d3318d1e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.089730] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.089855] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 965.090643] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03bdb8c0-f0c3-4d6d-9d2b-1f760c5280f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.096062] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 965.096062] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525a65bc-2fb9-fa99-4ea5-0d8bbf47fa53" [ 965.096062] env[62383]: _type = "Task" [ 965.096062] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.107199] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525a65bc-2fb9-fa99-4ea5-0d8bbf47fa53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.208269] env[62383]: DEBUG nova.compute.manager [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 965.337017] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70bdfbf-281f-4a37-a78b-2c283b8896ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.344574] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1855dc3-49fd-4527-9e65-4dfd5f6414c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.374438] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167b353b-8610-48aa-8ae9-5d554cf333c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.382188] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5a4a5b-e530-4ecc-a04c-7b39ce51c036 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.395258] env[62383]: DEBUG nova.compute.provider_tree [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.469661] env[62383]: DEBUG oslo_vmware.api [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452039, 'name': PowerOffVM_Task, 'duration_secs': 0.27794} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.469926] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.470122] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.470366] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15cbee4f-2d9f-404f-a3c7-30d9d56ad0dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.535138] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.535524] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.535636] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleting the datastore file [datastore2] 9d2e3772-e0b2-450a-9dc8-725c4a05cde4 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.535894] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-809f29cb-ad50-41f0-abce-37f97afc9554 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.547562] env[62383]: DEBUG oslo_vmware.api [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452042, 'name': PowerOffVM_Task, 'duration_secs': 0.204042} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.548683] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.548871] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.549180] env[62383]: DEBUG oslo_vmware.api [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 965.549180] env[62383]: value = "task-2452044" [ 965.549180] env[62383]: _type = "Task" [ 965.549180] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.549366] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7cb0cf71-5caa-4574-b11e-83334e7d747c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.558531] env[62383]: DEBUG oslo_vmware.api [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452044, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.608176] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525a65bc-2fb9-fa99-4ea5-0d8bbf47fa53, 'name': SearchDatastore_Task, 'duration_secs': 0.01013} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.609835] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.610065] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.610266] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleting the datastore file [datastore2] 362da311-fa2b-435d-b972-155a3ac22cbb {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.610532] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3b143a0-7cba-4dcc-82a3-f8eb8256b90e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.612668] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20a0fb41-3952-427d-8f8f-3be528022d88 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.617065] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 965.617065] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c500d1-bfd8-db52-cda4-81a9d5680fe0" [ 965.617065] env[62383]: _type = "Task" [ 965.617065] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.620828] env[62383]: DEBUG oslo_vmware.api [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for the task: (returnval){ [ 965.620828] env[62383]: value = "task-2452046" [ 965.620828] env[62383]: _type = "Task" [ 965.620828] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.626721] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c500d1-bfd8-db52-cda4-81a9d5680fe0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.631838] env[62383]: DEBUG oslo_vmware.api [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452046, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.738663] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.899066] env[62383]: DEBUG nova.scheduler.client.report [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 965.940605] env[62383]: DEBUG nova.compute.manager [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 965.966548] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 965.966799] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.966957] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 965.967158] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 
tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.967302] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 965.969022] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 965.969022] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 965.969022] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 965.969022] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 965.969022] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 965.969022] env[62383]: DEBUG nova.virt.hardware [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 965.969308] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5154bcf-6376-449b-b4c6-42fa760c6090 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.976803] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25160b5-d147-4240-9ff9-f0063dec8302 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.060640] env[62383]: DEBUG oslo_vmware.api [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143058} 
completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.060930] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.061096] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.061305] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.061497] env[62383]: INFO nova.compute.manager [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 966.061737] env[62383]: DEBUG oslo.service.loopingcall [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.061923] env[62383]: DEBUG nova.compute.manager [-] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 966.062029] env[62383]: DEBUG nova.network.neutron [-] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.128281] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c500d1-bfd8-db52-cda4-81a9d5680fe0, 'name': SearchDatastore_Task, 'duration_secs': 0.012253} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.128941] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.129280] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 4e5bd3ee-605f-4770-b658-9cbc3d0010ab/4e5bd3ee-605f-4770-b658-9cbc3d0010ab.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 966.129503] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ec9d411e-0a75-4e69-b31f-d35170ea1bb8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.133944] env[62383]: DEBUG oslo_vmware.api [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Task: {'id': task-2452046, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174127} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.134492] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.134708] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.134906] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.135102] env[62383]: INFO nova.compute.manager [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Took 1.12 seconds to destroy the instance on the hypervisor. [ 966.135334] env[62383]: DEBUG oslo.service.loopingcall [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.135514] env[62383]: DEBUG nova.compute.manager [-] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 966.135608] env[62383]: DEBUG nova.network.neutron [-] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.138095] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 966.138095] env[62383]: value = "task-2452047" [ 966.138095] env[62383]: _type = "Task" [ 966.138095] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.146770] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452047, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.405426] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.406141] env[62383]: DEBUG nova.compute.manager [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 966.409893] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.559s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.410186] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.413784] env[62383]: DEBUG oslo_concurrency.lockutils [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.753s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.413784] env[62383]: DEBUG oslo_concurrency.lockutils [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.416792] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.516s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.417167] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.421398] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.414s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.423686] env[62383]: INFO nova.compute.claims [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.431491] env[62383]: DEBUG nova.compute.manager [req-316bd364-ed46-44ea-a0af-e5777101f519 
req-9a517b6d-e629-490b-9ba5-4e56c65a720f service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Received event network-vif-deleted-dac828f5-7985-4dd4-9f06-63931ede1877 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 966.431698] env[62383]: INFO nova.compute.manager [req-316bd364-ed46-44ea-a0af-e5777101f519 req-9a517b6d-e629-490b-9ba5-4e56c65a720f service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Neutron deleted interface dac828f5-7985-4dd4-9f06-63931ede1877; detaching it from the instance and deleting it from the info cache [ 966.431875] env[62383]: DEBUG nova.network.neutron [req-316bd364-ed46-44ea-a0af-e5777101f519 req-9a517b6d-e629-490b-9ba5-4e56c65a720f service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.456818] env[62383]: INFO nova.scheduler.client.report [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleted allocations for instance df5e6f1b-ac01-4ac0-bc84-b49c54c3e771 [ 966.461303] env[62383]: INFO nova.scheduler.client.report [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted allocations for instance 81921762-ac51-42d2-83dc-d5b6e904fbb7 [ 966.471720] env[62383]: DEBUG nova.compute.manager [req-755fb6d0-111c-454a-8bf0-0664d0dfbc92 req-c80c1f99-11a3-4c14-ad15-6b9dc088422f service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Received event network-vif-deleted-20344f03-4957-4f0a-a1bb-5493c7942654 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 966.472335] env[62383]: INFO nova.compute.manager [req-755fb6d0-111c-454a-8bf0-0664d0dfbc92 req-c80c1f99-11a3-4c14-ad15-6b9dc088422f service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Neutron deleted interface 20344f03-4957-4f0a-a1bb-5493c7942654; detaching it from the instance and deleting it from the info cache [ 966.472335] env[62383]: DEBUG nova.network.neutron [req-755fb6d0-111c-454a-8bf0-0664d0dfbc92 req-c80c1f99-11a3-4c14-ad15-6b9dc088422f service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.490315] env[62383]: INFO nova.scheduler.client.report [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Deleted allocations for instance f193af26-eba8-471f-a00e-0afa9b190d0b [ 966.600933] env[62383]: DEBUG nova.network.neutron [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Successfully updated port: 5e45a953-c424-4939-8fc4-b19fe5f54fae {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 966.648131] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452047, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450897} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.648394] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 4e5bd3ee-605f-4770-b658-9cbc3d0010ab/4e5bd3ee-605f-4770-b658-9cbc3d0010ab.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 966.648603] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 966.648849] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-168828a1-26fd-489b-b33d-086c6505b230 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.654624] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 966.654624] env[62383]: value = "task-2452048" [ 966.654624] env[62383]: _type = "Task" [ 966.654624] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.661912] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452048, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.879781] env[62383]: DEBUG nova.network.neutron [-] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.888582] env[62383]: DEBUG nova.network.neutron [-] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.931454] env[62383]: DEBUG nova.compute.utils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 966.932792] env[62383]: DEBUG nova.compute.manager [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 966.932963] env[62383]: DEBUG nova.network.neutron [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 966.935767] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34991c12-cfd5-408d-a942-b823af94fb13 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.947801] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bd53ef-8f0c-4213-a07d-6dc449527b56 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.965635] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f5333adb-5579-4125-a225-1ecba16a1169 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "df5e6f1b-ac01-4ac0-bc84-b49c54c3e771" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.532s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.968746] env[62383]: DEBUG oslo_concurrency.lockutils [None req-883cd77b-65b0-4445-a1be-8ebc1cbe262a tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "81921762-ac51-42d2-83dc-d5b6e904fbb7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.726s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.987036] env[62383]: DEBUG nova.compute.manager [req-316bd364-ed46-44ea-a0af-e5777101f519 req-9a517b6d-e629-490b-9ba5-4e56c65a720f service nova] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Detach interface failed, port_id=dac828f5-7985-4dd4-9f06-63931ede1877, reason: Instance 362da311-fa2b-435d-b972-155a3ac22cbb could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 966.988940] env[62383]: DEBUG nova.policy [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bda8cb7b1005458ca6fc7e5ca6882e6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '603ba5501c904542b6ff0935f620e6da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 966.993159] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca2a12b2-e980-4a8a-8076-586c82024aae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.998492] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea74a2c8-9d1c-4a72-8053-816b8adae1f8 tempest-ServerMetadataNegativeTestJSON-1716396638 tempest-ServerMetadataNegativeTestJSON-1716396638-project-member] Lock "f193af26-eba8-471f-a00e-0afa9b190d0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.631s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.002218] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43478131-314c-4db6-a1c5-380175596109 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.033940] env[62383]: DEBUG nova.compute.manager [req-755fb6d0-111c-454a-8bf0-0664d0dfbc92 req-c80c1f99-11a3-4c14-ad15-6b9dc088422f service nova] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Detach interface failed, port_id=20344f03-4957-4f0a-a1bb-5493c7942654, reason: Instance 9d2e3772-e0b2-450a-9dc8-725c4a05cde4 could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 967.103987] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquiring lock "refresh_cache-4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.104167] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquired lock "refresh_cache-4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.104323] env[62383]: DEBUG nova.network.neutron [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 967.139927] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Volume attach. Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 967.140188] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496542', 'volume_id': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'name': 'volume-a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'eedc7859-3882-4837-9419-f9edce5f12fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'serial': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 967.141040] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091b6d95-c913-4cd0-a1e7-aadea8dfd289 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.161465] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc20bd11-af5e-4e75-bb49-9dfbe95f02cb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.168979] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05759} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.181479] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 967.188987] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] volume-a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b/volume-a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.189987] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f5ab25-d520-4671-a03c-c474709f6b92 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.193122] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89056b42-6f14-41eb-ab41-06249e6e2e04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.224788] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 4e5bd3ee-605f-4770-b658-9cbc3d0010ab/4e5bd3ee-605f-4770-b658-9cbc3d0010ab.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.225956] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0de8fe59-9bae-4c41-8d90-9aa81e09f078 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.241017] env[62383]: DEBUG oslo_vmware.api [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 967.241017] env[62383]: value = "task-2452049" [ 967.241017] env[62383]: _type = "Task" [ 967.241017] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.246007] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 967.246007] env[62383]: value = "task-2452050" [ 967.246007] env[62383]: _type = "Task" [ 967.246007] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.249226] env[62383]: DEBUG oslo_vmware.api [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452049, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.256323] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452050, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.382609] env[62383]: INFO nova.compute.manager [-] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Took 1.25 seconds to deallocate network for instance. [ 967.392026] env[62383]: INFO nova.compute.manager [-] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Took 1.33 seconds to deallocate network for instance. [ 967.439404] env[62383]: DEBUG nova.compute.manager [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 967.459253] env[62383]: DEBUG nova.network.neutron [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Successfully created port: 9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 967.664416] env[62383]: DEBUG nova.network.neutron [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 967.751633] env[62383]: DEBUG oslo_vmware.api [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452049, 'name': ReconfigVM_Task, 'duration_secs': 0.40981} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.761075] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfigured VM instance instance-00000048 to attach disk [datastore1] volume-a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b/volume-a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 967.767189] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be29105a-6f23-4847-9d10-d4bd9814b769 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.784639] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452050, 'name': ReconfigVM_Task, 'duration_secs': 0.371741} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.785363] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 4e5bd3ee-605f-4770-b658-9cbc3d0010ab/4e5bd3ee-605f-4770-b658-9cbc3d0010ab.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 967.787197] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1dd0ceb8-b4d5-44bd-86b4-9700f068e65c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.788995] env[62383]: DEBUG oslo_vmware.api [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 967.788995] env[62383]: value = "task-2452051" [ 967.788995] env[62383]: _type = "Task" [ 967.788995] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.796254] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 967.796254] env[62383]: value = "task-2452052" [ 967.796254] env[62383]: _type = "Task" [ 967.796254] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.802400] env[62383]: DEBUG oslo_vmware.api [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452051, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.807031] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452052, 'name': Rename_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.834168] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b5c225-b626-4a26-943c-301b06922340 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.844545] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808d50d8-3e5e-41f6-8f87-28a926addcef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.878285] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d9037b-eec1-4855-8a2a-7e27bdfafc50 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.886010] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edab64e7-dbc9-4893-88c0-9470c858e8c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.892874] env[62383]: DEBUG oslo_concurrency.lockutils [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.894643] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 967.903791] env[62383]: DEBUG nova.compute.provider_tree [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.909049] env[62383]: DEBUG nova.network.neutron [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Updating instance_info_cache with network_info: [{"id": "5e45a953-c424-4939-8fc4-b19fe5f54fae", "address": "fa:16:3e:fd:64:2b", "network": {"id": "12c8f997-3bfd-4f43-a280-742927011839", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1696541748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d0caf09f53f24077808f2dac9db8b730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a766368-f5a3-472c-af56-9cfca63012ae", 
"external-id": "nsx-vlan-transportzone-518", "segmentation_id": 518, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e45a953-c4", "ovs_interfaceid": "5e45a953-c424-4939-8fc4-b19fe5f54fae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.300307] env[62383]: DEBUG oslo_vmware.api [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452051, 'name': ReconfigVM_Task, 'duration_secs': 0.157034} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.303538] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496542', 'volume_id': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'name': 'volume-a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'eedc7859-3882-4837-9419-f9edce5f12fa', 'attached_at': '', 'detached_at': '', 'volume_id': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'serial': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 968.310307] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452052, 'name': Rename_Task, 'duration_secs': 0.136676} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.310586] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 968.311913] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d7f7dc7-72fe-4b2e-8019-21c9dba9d308 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.318805] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 968.318805] env[62383]: value = "task-2452053" [ 968.318805] env[62383]: _type = "Task" [ 968.318805] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.327507] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452053, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.409950] env[62383]: DEBUG nova.scheduler.client.report [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 968.414155] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Releasing lock "refresh_cache-4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.414488] env[62383]: DEBUG nova.compute.manager [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Instance network_info: |[{"id": "5e45a953-c424-4939-8fc4-b19fe5f54fae", "address": "fa:16:3e:fd:64:2b", "network": {"id": "12c8f997-3bfd-4f43-a280-742927011839", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1696541748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d0caf09f53f24077808f2dac9db8b730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a766368-f5a3-472c-af56-9cfca63012ae", "external-id": "nsx-vlan-transportzone-518", "segmentation_id": 518, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e45a953-c4", "ovs_interfaceid": "5e45a953-c424-4939-8fc4-b19fe5f54fae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 968.415324] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:64:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a766368-f5a3-472c-af56-9cfca63012ae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e45a953-c424-4939-8fc4-b19fe5f54fae', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 968.423379] env[62383]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Creating folder: Project (d0caf09f53f24077808f2dac9db8b730). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 968.423742] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3cb7452-ff93-468d-b21c-b7381d42e5ab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.436226] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Created folder: Project (d0caf09f53f24077808f2dac9db8b730) in parent group-v496304. [ 968.436424] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Creating folder: Instances. Parent ref: group-v496546. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 968.436670] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af74b19a-f7ff-48a1-bc8b-592b1e922e8d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.447708] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Created folder: Instances in parent group-v496546. [ 968.447708] env[62383]: DEBUG oslo.service.loopingcall [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 968.447708] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 968.447708] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c33d454-9496-4da2-b7e2-3f16bde26a75 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.463250] env[62383]: DEBUG nova.compute.manager [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 968.471096] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 968.471096] env[62383]: value = "task-2452056" [ 968.471096] env[62383]: _type = "Task" [ 968.471096] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.482437] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452056, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.501704] env[62383]: DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 968.502018] env[62383]: DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 968.502256] env[62383]: DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 968.502456] env[62383]: DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 968.502656] env[62383]: DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 968.502825] env[62383]: DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 968.503102] env[62383]: DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 968.503343] env[62383]: DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 968.503591] env[62383]: 
DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 968.503799] env[62383]: DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 968.504017] env[62383]: DEBUG nova.virt.hardware [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 968.504974] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3e4077-b33a-42af-b6d0-882ab941a1d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.512907] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd567f88-b480-4b91-bdbf-1763ca5cb7fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.575949] env[62383]: DEBUG nova.compute.manager [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Received event network-vif-plugged-5e45a953-c424-4939-8fc4-b19fe5f54fae {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 968.576127] env[62383]: DEBUG oslo_concurrency.lockutils [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] Acquiring lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.576286] env[62383]: DEBUG oslo_concurrency.lockutils [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] Lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.576482] env[62383]: DEBUG oslo_concurrency.lockutils [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] Lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.576642] env[62383]: DEBUG nova.compute.manager [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] No waiting events found dispatching network-vif-plugged-5e45a953-c424-4939-8fc4-b19fe5f54fae {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 968.576727] env[62383]: WARNING nova.compute.manager 
[req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Received unexpected event network-vif-plugged-5e45a953-c424-4939-8fc4-b19fe5f54fae for instance with vm_state building and task_state spawning. [ 968.576883] env[62383]: DEBUG nova.compute.manager [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Received event network-changed-5e45a953-c424-4939-8fc4-b19fe5f54fae {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 968.577049] env[62383]: DEBUG nova.compute.manager [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Refreshing instance network info cache due to event network-changed-5e45a953-c424-4939-8fc4-b19fe5f54fae. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 968.577236] env[62383]: DEBUG oslo_concurrency.lockutils [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] Acquiring lock "refresh_cache-4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.577372] env[62383]: DEBUG oslo_concurrency.lockutils [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] Acquired lock "refresh_cache-4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.577526] env[62383]: DEBUG nova.network.neutron [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Refreshing network info cache for port 5e45a953-c424-4939-8fc4-b19fe5f54fae {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 968.829397] env[62383]: DEBUG oslo_vmware.api [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452053, 'name': PowerOnVM_Task, 'duration_secs': 0.493249} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.829715] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.830668] env[62383]: INFO nova.compute.manager [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Took 5.38 seconds to spawn the instance on the hypervisor. 
[ 968.830668] env[62383]: DEBUG nova.compute.manager [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.830897] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc28f66e-1891-4ef7-8fbc-61364e979ed7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.917545] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.917839] env[62383]: DEBUG nova.compute.manager [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 968.921693] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.580s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.922017] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.924060] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.860s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.924445] env[62383]: DEBUG nova.objects.instance [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lazy-loading 'resources' on Instance uuid 4136466e-d9c6-448a-b392-415bb7c44a8d {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.954652] env[62383]: INFO nova.scheduler.client.report [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted allocations for instance 1ab60ef9-4209-4097-8a2c-a55e3a6684b2 [ 968.982031] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452056, 'name': CreateVM_Task, 'duration_secs': 0.357444} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.982173] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 968.982989] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.983184] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.983509] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 968.983868] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c5c02e9-b30a-4f33-9fbb-12e387c95f4a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.988722] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for the task: (returnval){ [ 968.988722] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f14126-9ac9-93ad-a3ca-0458dd5d2c4d" [ 968.988722] env[62383]: _type = "Task" [ 968.988722] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.997028] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f14126-9ac9-93ad-a3ca-0458dd5d2c4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.246622] env[62383]: DEBUG nova.network.neutron [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Successfully updated port: 9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 969.337655] env[62383]: DEBUG nova.network.neutron [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Updated VIF entry in instance network info cache for port 5e45a953-c424-4939-8fc4-b19fe5f54fae. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 969.338352] env[62383]: DEBUG nova.network.neutron [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Updating instance_info_cache with network_info: [{"id": "5e45a953-c424-4939-8fc4-b19fe5f54fae", "address": "fa:16:3e:fd:64:2b", "network": {"id": "12c8f997-3bfd-4f43-a280-742927011839", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1696541748-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d0caf09f53f24077808f2dac9db8b730", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a766368-f5a3-472c-af56-9cfca63012ae", "external-id": "nsx-vlan-transportzone-518", "segmentation_id": 518, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e45a953-c4", "ovs_interfaceid": "5e45a953-c424-4939-8fc4-b19fe5f54fae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.355652] env[62383]: DEBUG nova.objects.instance [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'flavor' on Instance uuid eedc7859-3882-4837-9419-f9edce5f12fa {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.357158] env[62383]: INFO nova.compute.manager [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Took 36.51 seconds to build instance. [ 969.423076] env[62383]: DEBUG nova.compute.utils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 969.424651] env[62383]: DEBUG nova.compute.manager [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 969.424843] env[62383]: DEBUG nova.network.neutron [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 969.462334] env[62383]: DEBUG oslo_concurrency.lockutils [None req-80f364b7-4162-4afa-b776-85780c7ed8e2 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1ab60ef9-4209-4097-8a2c-a55e3a6684b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.530s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.496587] env[62383]: DEBUG nova.policy [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4de9dec9c1d2474eb611f4a2623d602d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aead8ea1d1de4d0d8d8c07dec519d8b4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 969.510133] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f14126-9ac9-93ad-a3ca-0458dd5d2c4d, 'name': SearchDatastore_Task, 'duration_secs': 0.011719} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.510793] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.510793] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.511399] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.511399] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.511399] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 969.511659] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0686de70-b15d-4f2e-ba74-5359fd6c00b0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.521285] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 969.522053] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 969.525219] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e97c0a38-84da-4847-a1bc-400adb4da175 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.529198] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for the task: (returnval){ [ 969.529198] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52610689-aaae-9130-311e-2eb1be92b617" [ 969.529198] env[62383]: _type = "Task" [ 969.529198] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.545172] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52610689-aaae-9130-311e-2eb1be92b617, 'name': SearchDatastore_Task, 'duration_secs': 0.011} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.546140] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91894101-61c3-4c4b-9537-28a8ab0bf7e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.554158] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for the task: (returnval){ [ 969.554158] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5208b504-ce18-c1b7-6f54-c5d84e080348" [ 969.554158] env[62383]: _type = "Task" [ 969.554158] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.563815] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5208b504-ce18-c1b7-6f54-c5d84e080348, 'name': SearchDatastore_Task, 'duration_secs': 0.00951} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.564087] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.564353] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d/4cfea58a-35cc-4e3f-8f39-0bc00968eb4d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 969.564609] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b94c321-93f0-41a2-9f59-f259f90cab44 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.576255] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for the task: (returnval){ [ 969.576255] env[62383]: value = "task-2452057" [ 969.576255] env[62383]: _type = "Task" [ 969.576255] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.587704] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452057, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.713469] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.713728] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.713945] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.714158] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.714336] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.717870] env[62383]: INFO nova.compute.manager [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Terminating instance [ 969.751461] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "refresh_cache-8994780e-1b8f-4464-a303-a1e68206e770" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.751615] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "refresh_cache-8994780e-1b8f-4464-a303-a1e68206e770" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.751764] env[62383]: DEBUG nova.network.neutron 
[None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.822103] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1410e14-ddec-46c5-b99e-7fbb8b0dd2c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.834886] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68954ff-5bfd-4efe-9127-588d0deb5a22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.840959] env[62383]: DEBUG oslo_concurrency.lockutils [req-d6f75107-221c-4ad5-bfb0-c1711eef34b3 req-e567ad5c-1200-4b83-beba-8b1def080d6c service nova] Releasing lock "refresh_cache-4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.875753] env[62383]: DEBUG oslo_concurrency.lockutils [None req-db4c9bbc-dc4b-4c5a-8e17-134190da107e tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "4e5bd3ee-605f-4770-b658-9cbc3d0010ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.041s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.876651] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c58dbed3-cc67-4188-9849-b9c2f53a002a tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.410s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.878229] env[62383]: DEBUG nova.network.neutron [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Successfully created port: 92d4aebb-165f-462e-96ea-53a36bc5eae8 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 969.883909] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd3f705-57f1-48ac-8dcf-16c50291b034 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.891029] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990ffcf4-d52a-4b48-9ebd-cc17e2a93dca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.909770] env[62383]: DEBUG nova.compute.provider_tree [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.928809] env[62383]: DEBUG nova.compute.manager [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b 
tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 970.091108] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452057, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486661} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.091108] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d/4cfea58a-35cc-4e3f-8f39-0bc00968eb4d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 970.092236] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 970.092497] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc99835b-3520-473a-8faa-b4ce45b6d8e2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.099878] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for the task: (returnval){ [ 970.099878] env[62383]: value = "task-2452058" [ 970.099878] env[62383]: _type = "Task" [ 970.099878] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.110601] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452058, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.225975] env[62383]: DEBUG nova.compute.manager [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 970.225975] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 970.225975] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c47f1c-88d8-46e9-ba93-a5dd217c8876 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.238187] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.238725] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c397906c-a192-416e-a1e1-6eb3010a40fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.249155] env[62383]: DEBUG oslo_vmware.api [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 970.249155] env[62383]: value = "task-2452059" [ 970.249155] env[62383]: _type = "Task" [ 970.249155] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.263426] env[62383]: DEBUG oslo_vmware.api [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2452059, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.314969] env[62383]: DEBUG nova.network.neutron [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 970.413270] env[62383]: DEBUG nova.scheduler.client.report [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 970.515816] env[62383]: DEBUG nova.network.neutron [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Updating instance_info_cache with network_info: [{"id": "9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e", "address": "fa:16:3e:e6:71:39", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a9d7c14-b2", "ovs_interfaceid": "9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.612375] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452058, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.245297} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.612612] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 970.613436] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d21282-1baf-4c19-a521-ed6e291289fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.636913] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d/4cfea58a-35cc-4e3f-8f39-0bc00968eb4d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 970.637253] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d561b8a-06ea-4108-a4db-e01cf40f4812 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.656860] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for the task: (returnval){ [ 970.656860] env[62383]: value = "task-2452060" [ 970.656860] env[62383]: _type = "Task" [ 970.656860] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.664857] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452060, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.763557] env[62383]: DEBUG oslo_vmware.api [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2452059, 'name': PowerOffVM_Task, 'duration_secs': 0.390315} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.763557] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 970.763682] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 970.763911] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7ad971c-b660-4b1e-bb98-b8a122731095 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.835897] env[62383]: DEBUG nova.compute.manager [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Stashing vm_state: active {{(pid=62383) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 970.864967] env[62383]: DEBUG nova.compute.manager [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Received event network-vif-plugged-9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 970.864967] env[62383]: DEBUG oslo_concurrency.lockutils [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] Acquiring lock "8994780e-1b8f-4464-a303-a1e68206e770-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.864967] env[62383]: DEBUG oslo_concurrency.lockutils [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] Lock "8994780e-1b8f-4464-a303-a1e68206e770-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.864967] env[62383]: DEBUG oslo_concurrency.lockutils [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] Lock "8994780e-1b8f-4464-a303-a1e68206e770-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.864967] env[62383]: DEBUG nova.compute.manager [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] No waiting events found dispatching network-vif-plugged-9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 970.864967] env[62383]: WARNING nova.compute.manager [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] 
[instance: 8994780e-1b8f-4464-a303-a1e68206e770] Received unexpected event network-vif-plugged-9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e for instance with vm_state building and task_state spawning. [ 970.864967] env[62383]: DEBUG nova.compute.manager [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Received event network-changed-9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 970.864967] env[62383]: DEBUG nova.compute.manager [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Refreshing instance network info cache due to event network-changed-9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 970.864967] env[62383]: DEBUG oslo_concurrency.lockutils [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] Acquiring lock "refresh_cache-8994780e-1b8f-4464-a303-a1e68206e770" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.885430] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 970.885607] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 970.885797] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Deleting the datastore file [datastore2] c2fee51e-3cc9-421c-bfe5-b324a5b14197 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 970.886348] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5dbd05c-96ac-4455-8aa8-5a7fd6b1239c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.893818] env[62383]: DEBUG oslo_vmware.api [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 970.893818] env[62383]: value = "task-2452062" [ 970.893818] env[62383]: _type = "Task" [ 970.893818] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.906161] env[62383]: DEBUG oslo_vmware.api [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2452062, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.919209] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.995s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.921611] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.147s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.921839] env[62383]: DEBUG nova.objects.instance [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lazy-loading 'resources' on Instance uuid 161d6537-fe78-4a42-b8a5-e3d7d78c0154 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.941192] env[62383]: DEBUG nova.compute.manager [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 970.945738] env[62383]: INFO nova.scheduler.client.report [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted allocations for instance 4136466e-d9c6-448a-b392-415bb7c44a8d [ 970.973860] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 970.974616] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.974823] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image limits 0:0:0 {{(pid=62383) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 970.975231] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.975350] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 970.975526] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 970.975777] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 970.975978] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 970.976238] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 970.976475] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 970.976845] env[62383]: DEBUG nova.virt.hardware [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 970.978652] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb74a18f-f977-4bbd-9d47-2f581a4a481d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.986736] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3ede33-a6b1-487e-8c8b-c0ae9a631898 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.018586] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 
tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "refresh_cache-8994780e-1b8f-4464-a303-a1e68206e770" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.019007] env[62383]: DEBUG nova.compute.manager [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Instance network_info: |[{"id": "9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e", "address": "fa:16:3e:e6:71:39", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a9d7c14-b2", "ovs_interfaceid": "9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 971.019567] env[62383]: DEBUG oslo_concurrency.lockutils [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] Acquired lock "refresh_cache-8994780e-1b8f-4464-a303-a1e68206e770" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.019756] env[62383]: DEBUG nova.network.neutron [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Refreshing network info cache for port 9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 971.021011] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:71:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cb971244-43ba-41b4-a6a2-a4558548012c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 971.029963] env[62383]: DEBUG oslo.service.loopingcall [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.032828] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 971.033293] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-238a3b71-16c1-4cbf-9e1a-c173167c94de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.054916] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 971.054916] env[62383]: value = "task-2452063" [ 971.054916] env[62383]: _type = "Task" [ 971.054916] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.063212] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452063, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.169940] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452060, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.233091] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.233091] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.278137] env[62383]: INFO nova.compute.manager [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Rebuilding instance [ 971.283111] env[62383]: DEBUG nova.network.neutron [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Updated VIF entry in instance network info cache for port 9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 971.283654] env[62383]: DEBUG nova.network.neutron [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Updating instance_info_cache with network_info: [{"id": "9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e", "address": "fa:16:3e:e6:71:39", "network": {"id": "946b090a-a04a-4dd4-8350-915e70f01177", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1989089220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "603ba5501c904542b6ff0935f620e6da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cb971244-43ba-41b4-a6a2-a4558548012c", "external-id": "nsx-vlan-transportzone-873", "segmentation_id": 873, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a9d7c14-b2", "ovs_interfaceid": "9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.353577] env[62383]: DEBUG nova.compute.manager [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.354715] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd400cce-b6a8-413d-b33a-7f8d7dd4675c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.375256] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.407750] env[62383]: DEBUG oslo_vmware.api [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2452062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206245} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.408883] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 971.408883] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 971.408883] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 971.409281] env[62383]: INFO nova.compute.manager [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Took 1.18 seconds to destroy the instance on the hypervisor. [ 971.409729] env[62383]: DEBUG oslo.service.loopingcall [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.410866] env[62383]: DEBUG nova.compute.manager [-] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 971.410998] env[62383]: DEBUG nova.network.neutron [-] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 971.460264] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8213aa23-5c34-4efa-a543-4b242d3bef28 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4136466e-d9c6-448a-b392-415bb7c44a8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.996s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.569020] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452063, 'name': CreateVM_Task, 'duration_secs': 0.371594} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.569020] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 971.569020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.569020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.569020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 971.569020] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3f1d49b-e6dc-49ae-9919-3ddcd3014683 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.573559] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 971.573559] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520e2321-8a2d-45d5-f76d-5e63489c2ea4" [ 971.573559] env[62383]: _type = "Task" [ 971.573559] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.586200] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520e2321-8a2d-45d5-f76d-5e63489c2ea4, 'name': SearchDatastore_Task, 'duration_secs': 0.009448} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.586508] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.587661] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 971.587661] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.587661] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.587661] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 971.587661] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-605f7690-da1f-40d9-9577-a019b664f4be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.595615] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 971.595791] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 971.596540] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fd09f4f-85bb-4acc-9ff3-7cb7bc67faec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.601875] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 971.601875] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e040c7-3042-1c8d-9999-750f66e73977" [ 971.601875] env[62383]: _type = "Task" [ 971.601875] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.613571] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e040c7-3042-1c8d-9999-750f66e73977, 'name': SearchDatastore_Task, 'duration_secs': 0.007704} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.614687] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5099cc1-c419-4285-89ee-ab3006a12b6e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.624353] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 971.624353] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52820fe2-fa03-7d01-9e06-f22fc1845e00" [ 971.624353] env[62383]: _type = "Task" [ 971.624353] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.634658] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52820fe2-fa03-7d01-9e06-f22fc1845e00, 'name': SearchDatastore_Task, 'duration_secs': 0.008198} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.637799] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.637897] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8994780e-1b8f-4464-a303-a1e68206e770/8994780e-1b8f-4464-a303-a1e68206e770.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 971.638416] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c1d5483-6ac4-4c57-a09d-8e1b3c60ee0c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.645377] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 971.645377] env[62383]: value = "task-2452064" [ 971.645377] env[62383]: _type = "Task" [ 971.645377] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.658364] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452064, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.669303] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452060, 'name': ReconfigVM_Task, 'duration_secs': 0.622844} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.669626] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d/4cfea58a-35cc-4e3f-8f39-0bc00968eb4d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 971.670336] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-799b6fcb-8dd3-4c15-b0f3-2945d91afd52 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.677049] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for the task: (returnval){ [ 971.677049] env[62383]: value = "task-2452065" [ 971.677049] env[62383]: _type = "Task" [ 971.677049] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.687717] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452065, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.735282] env[62383]: DEBUG nova.compute.manager [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 971.787167] env[62383]: DEBUG oslo_concurrency.lockutils [req-51b124a6-33ca-4092-8a6b-7bfca3f37c96 req-1db9ccfd-63ae-46ca-82d8-9662c64b39b4 service nova] Releasing lock "refresh_cache-8994780e-1b8f-4464-a303-a1e68206e770" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.842139] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a72d56e-da15-49c8-8958-98e518e9f174 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.851870] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4127acdf-128b-4529-b307-2f01542afad2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.893542] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556098e9-706c-4706-873a-4b242a8f0ae1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.904286] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aebab20-0b75-48d6-bd1d-4ce5af77fc33 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.921700] env[62383]: DEBUG nova.compute.provider_tree [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.961676] env[62383]: DEBUG nova.network.neutron [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Successfully updated port: 92d4aebb-165f-462e-96ea-53a36bc5eae8 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.157686] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452064, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485031} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.158228] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8994780e-1b8f-4464-a303-a1e68206e770/8994780e-1b8f-4464-a303-a1e68206e770.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 972.158664] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 972.159087] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35f126fb-ed49-48a7-8319-2257c134eb29 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.169186] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 972.169186] env[62383]: value = "task-2452066" [ 972.169186] env[62383]: _type = "Task" [ 972.169186] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.180408] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452066, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.189549] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452065, 'name': Rename_Task, 'duration_secs': 0.303423} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.189862] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 972.190152] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81851a85-f12d-40ac-af7e-b7d736ac0b24 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.196182] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for the task: (returnval){ [ 972.196182] env[62383]: value = "task-2452067" [ 972.196182] env[62383]: _type = "Task" [ 972.196182] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.203989] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452067, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.265221] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.392914] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.393153] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb7140d0-af89-459a-b525-e5b575bf916a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.401349] env[62383]: DEBUG nova.network.neutron [-] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.403357] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 972.403357] env[62383]: value = "task-2452068" [ 972.403357] env[62383]: _type = "Task" [ 972.403357] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.416415] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452068, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.424541] env[62383]: DEBUG nova.scheduler.client.report [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.466993] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "refresh_cache-9f8e346e-815c-492d-84a9-00ebdca3bcc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.467199] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "refresh_cache-9f8e346e-815c-492d-84a9-00ebdca3bcc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.467388] env[62383]: DEBUG nova.network.neutron [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.679802] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452066, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064679} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.680159] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 972.680961] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65d7908-5a22-4338-9c6e-b115ef8e0b2b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.707082] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 8994780e-1b8f-4464-a303-a1e68206e770/8994780e-1b8f-4464-a303-a1e68206e770.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 972.709057] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81b3722a-bfeb-4eb9-8f36-a86588a733c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.731702] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452067, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.734125] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 972.734125] env[62383]: value = "task-2452069" [ 972.734125] env[62383]: _type = "Task" [ 972.734125] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.742259] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452069, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.909908] env[62383]: INFO nova.compute.manager [-] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Took 1.50 seconds to deallocate network for instance. [ 972.914734] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452068, 'name': PowerOffVM_Task, 'duration_secs': 0.30839} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.918128] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 972.918246] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.919621] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4445d0-7602-4478-b648-2024e0c33b6a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.926676] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 972.926778] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-341d03a5-fc8c-4a17-bc71-060df46fc6cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.929028] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.007s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.931326] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.450s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.933539] env[62383]: INFO nova.compute.claims [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 972.954428] env[62383]: INFO nova.scheduler.client.report [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Deleted allocations for instance 161d6537-fe78-4a42-b8a5-e3d7d78c0154 [ 972.961328] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 972.962298] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None 
req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 972.962298] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Deleting the datastore file [datastore1] 4e5bd3ee-605f-4770-b658-9cbc3d0010ab {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.962831] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9434a311-0214-4186-b08b-56d12da12dfe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.975204] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 972.975204] env[62383]: value = "task-2452071" [ 972.975204] env[62383]: _type = "Task" [ 972.975204] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.987376] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452071, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.018099] env[62383]: DEBUG nova.network.neutron [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.057790] env[62383]: DEBUG nova.compute.manager [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Received event network-vif-plugged-92d4aebb-165f-462e-96ea-53a36bc5eae8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 973.059122] env[62383]: DEBUG oslo_concurrency.lockutils [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] Acquiring lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.059122] env[62383]: DEBUG oslo_concurrency.lockutils [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.059122] env[62383]: DEBUG oslo_concurrency.lockutils [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.059122] env[62383]: DEBUG nova.compute.manager [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] No waiting events found dispatching network-vif-plugged-92d4aebb-165f-462e-96ea-53a36bc5eae8 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 973.059122] env[62383]: WARNING nova.compute.manager [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Received unexpected event network-vif-plugged-92d4aebb-165f-462e-96ea-53a36bc5eae8 for instance with vm_state building and task_state spawning. [ 973.059660] env[62383]: DEBUG nova.compute.manager [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Received event network-vif-deleted-e04396ac-7097-4a6c-8e34-1a92f30eb36a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 973.059660] env[62383]: DEBUG nova.compute.manager [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Received event network-changed-92d4aebb-165f-462e-96ea-53a36bc5eae8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 973.059660] env[62383]: DEBUG nova.compute.manager [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Refreshing instance network info cache due to event network-changed-92d4aebb-165f-462e-96ea-53a36bc5eae8. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 973.059660] env[62383]: DEBUG oslo_concurrency.lockutils [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] Acquiring lock "refresh_cache-9f8e346e-815c-492d-84a9-00ebdca3bcc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.156606] env[62383]: DEBUG nova.network.neutron [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Updating instance_info_cache with network_info: [{"id": "92d4aebb-165f-462e-96ea-53a36bc5eae8", "address": "fa:16:3e:dd:82:52", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92d4aebb-16", "ovs_interfaceid": "92d4aebb-165f-462e-96ea-53a36bc5eae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.207819] env[62383]: DEBUG oslo_vmware.api [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452067, 'name': PowerOnVM_Task, 'duration_secs': 0.705518} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.208380] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 973.208755] env[62383]: INFO nova.compute.manager [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Took 7.27 seconds to spawn the instance on the hypervisor. 
[ 973.209159] env[62383]: DEBUG nova.compute.manager [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 973.210253] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b465b76-e247-4e2c-869e-2ed3efd7c363 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.244597] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452069, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.414595] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "4d929f43-cea2-41a0-9822-180a2647be2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.415018] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4d929f43-cea2-41a0-9822-180a2647be2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.421589] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.466062] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2717920a-212f-4f70-a948-498dc81dc6db tempest-ImagesTestJSON-668991885 tempest-ImagesTestJSON-668991885-project-member] Lock "161d6537-fe78-4a42-b8a5-e3d7d78c0154" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.045s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.489900] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452071, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.310442} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.490240] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.490494] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.490802] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.665018] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "refresh_cache-9f8e346e-815c-492d-84a9-00ebdca3bcc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.665018] env[62383]: DEBUG nova.compute.manager [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Instance network_info: |[{"id": "92d4aebb-165f-462e-96ea-53a36bc5eae8", "address": "fa:16:3e:dd:82:52", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92d4aebb-16", "ovs_interfaceid": "92d4aebb-165f-462e-96ea-53a36bc5eae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 973.665018] env[62383]: DEBUG oslo_concurrency.lockutils [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] Acquired lock "refresh_cache-9f8e346e-815c-492d-84a9-00ebdca3bcc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.665018] env[62383]: DEBUG nova.network.neutron [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] [instance: 
9f8e346e-815c-492d-84a9-00ebdca3bcc3] Refreshing network info cache for port 92d4aebb-165f-462e-96ea-53a36bc5eae8 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.665018] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:82:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92d4aebb-165f-462e-96ea-53a36bc5eae8', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.675129] env[62383]: DEBUG oslo.service.loopingcall [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.676375] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.676746] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7126484f-7955-422a-bcf0-666ef3187975 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.702233] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.702233] env[62383]: value = "task-2452072" [ 973.702233] env[62383]: _type = "Task" [ 973.702233] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.711232] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452072, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.731429] env[62383]: INFO nova.compute.manager [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Took 38.82 seconds to build instance. [ 973.746936] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452069, 'name': ReconfigVM_Task, 'duration_secs': 0.529833} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.747798] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 8994780e-1b8f-4464-a303-a1e68206e770/8994780e-1b8f-4464-a303-a1e68206e770.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.748467] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52de2c64-c32c-48f7-9a9e-8d9d049968cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.757382] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 973.757382] env[62383]: value = "task-2452073" [ 973.757382] env[62383]: _type = "Task" [ 973.757382] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.768240] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452073, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.918710] env[62383]: DEBUG nova.compute.manager [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 974.210897] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452072, 'name': CreateVM_Task, 'duration_secs': 0.341624} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.211067] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.211767] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.211916] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.213270] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 974.213320] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d64b75b-9bf9-4e87-96dc-1bab0548a588 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.224679] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 974.224679] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c024f0-267a-6ece-89d8-dcbe8990f7bb" [ 974.224679] env[62383]: _type = "Task" [ 974.224679] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.236408] env[62383]: DEBUG oslo_concurrency.lockutils [None req-91881372-6f8e-41c4-a026-bcfa97d64d8a tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.337s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.236998] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c024f0-267a-6ece-89d8-dcbe8990f7bb, 'name': SearchDatastore_Task, 'duration_secs': 0.009576} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.239284] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.239284] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.239284] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.239284] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.239284] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.239284] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf221b2e-0965-4ddb-be09-cde42b04eb1e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.252040] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.252040] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 974.252040] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7e7bfdc-c1aa-4a3e-82b1-3b8a982cca72 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.267577] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 974.267577] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52374f66-09ae-9215-49d0-06f4eb740773" [ 974.267577] env[62383]: _type = "Task" [ 974.267577] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.275744] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452073, 'name': Rename_Task, 'duration_secs': 0.156067} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.276430] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 974.276709] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c496f0c-7369-479d-8217-07f9976a7295 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.281871] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52374f66-09ae-9215-49d0-06f4eb740773, 'name': SearchDatastore_Task, 'duration_secs': 0.008906} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.286352] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af96d188-bd1a-42fa-912a-6bb8e8f7c81e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.291743] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 974.291743] env[62383]: value = "task-2452074" [ 974.291743] env[62383]: _type = "Task" [ 974.291743] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.297217] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 974.297217] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5222670b-33d5-bec0-a66d-877e67d6b7d4" [ 974.297217] env[62383]: _type = "Task" [ 974.297217] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.306712] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452074, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.311852] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5222670b-33d5-bec0-a66d-877e67d6b7d4, 'name': SearchDatastore_Task, 'duration_secs': 0.012958} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.312114] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.312625] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9f8e346e-815c-492d-84a9-00ebdca3bcc3/9f8e346e-815c-492d-84a9-00ebdca3bcc3.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 974.312885] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65eb4019-906a-483d-b365-9075d0647c99 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.320264] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 974.320264] env[62383]: value = "task-2452075" [ 974.320264] env[62383]: _type = "Task" [ 974.320264] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.330522] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452075, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.337853] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3aed6e-0d1e-4efa-92ff-0eec5f8832f5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.342903] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0abfd585-80ef-4c64-8e5a-f1ac243cdc62 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.378066] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb57470-32f7-491b-a6d0-7ea709ffe9d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.386488] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999c9f76-cba7-4e2a-a5db-22fc803041b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.406236] env[62383]: DEBUG nova.compute.provider_tree [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.440918] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.476405] env[62383]: DEBUG nova.network.neutron [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Updated VIF entry in instance network info cache for port 92d4aebb-165f-462e-96ea-53a36bc5eae8. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.476756] env[62383]: DEBUG nova.network.neutron [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Updating instance_info_cache with network_info: [{"id": "92d4aebb-165f-462e-96ea-53a36bc5eae8", "address": "fa:16:3e:dd:82:52", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92d4aebb-16", "ovs_interfaceid": "92d4aebb-165f-462e-96ea-53a36bc5eae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.544933] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 974.546030] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 974.546030] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 974.546030] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 974.546030] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 974.546030] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 974.546030] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 974.546345] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 974.546345] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 974.546484] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 974.546657] env[62383]: DEBUG nova.virt.hardware [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 974.547623] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10520a14-418f-41b2-8803-f99d1b9acf40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.560223] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f00c185-1bae-4447-8075-d84fba92a862 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.576029] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 974.582494] env[62383]: DEBUG oslo.service.loopingcall [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.582870] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 974.583157] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-06a8008e-5dca-489d-9504-f34ad0e2000b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.601263] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 974.601263] env[62383]: value = "task-2452076" [ 974.601263] env[62383]: _type = "Task" [ 974.601263] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.609845] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452076, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.808072] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452074, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.836955] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452075, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.907180] env[62383]: DEBUG nova.scheduler.client.report [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.974216] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.974557] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.982538] env[62383]: DEBUG oslo_concurrency.lockutils [req-000885ff-b0ed-44fb-9e23-f24a4ca6b68a req-1fd19dae-ecb3-49ad-94ea-8576dad3cd20 service nova] Releasing lock "refresh_cache-9f8e346e-815c-492d-84a9-00ebdca3bcc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.112750] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452076, 'name': CreateVM_Task, 'duration_secs': 0.46769} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.112919] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 975.113350] env[62383]: DEBUG oslo_concurrency.lockutils [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.113521] env[62383]: DEBUG oslo_concurrency.lockutils [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.113851] env[62383]: DEBUG oslo_concurrency.lockutils [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 975.114117] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-738b910b-e1ff-439f-9760-14b610cf68bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.118299] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 975.118299] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5211ef73-846b-d10a-45e2-bbe40e8f0d2d" [ 975.118299] env[62383]: _type = "Task" [ 975.118299] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.125782] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5211ef73-846b-d10a-45e2-bbe40e8f0d2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.303941] env[62383]: DEBUG oslo_vmware.api [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452074, 'name': PowerOnVM_Task, 'duration_secs': 0.777646} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.304307] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 975.304691] env[62383]: INFO nova.compute.manager [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Took 6.84 seconds to spawn the instance on the hypervisor. [ 975.304850] env[62383]: DEBUG nova.compute.manager [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 975.305700] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39aee93d-be90-4ed5-a693-2896e831def3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.333773] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452075, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552256} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.334041] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9f8e346e-815c-492d-84a9-00ebdca3bcc3/9f8e346e-815c-492d-84a9-00ebdca3bcc3.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 975.334252] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 975.334500] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60290fb8-25bb-4a40-8554-7619e7d0a699 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.341099] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 975.341099] env[62383]: value = "task-2452077" [ 975.341099] env[62383]: _type = "Task" [ 975.341099] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.350794] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452077, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.415208] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.415949] env[62383]: DEBUG nova.compute.manager [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 975.421205] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.530s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.424314] env[62383]: INFO nova.compute.claims [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 975.431500] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquiring lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.431660] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.431865] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquiring lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.432058] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.432230] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.436153] env[62383]: INFO nova.compute.manager [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Terminating instance [ 975.476720] env[62383]: DEBUG nova.compute.manager [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 975.630469] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5211ef73-846b-d10a-45e2-bbe40e8f0d2d, 'name': SearchDatastore_Task, 'duration_secs': 0.019817} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.630726] env[62383]: DEBUG oslo_concurrency.lockutils [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.630853] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.631228] env[62383]: DEBUG oslo_concurrency.lockutils [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.631398] env[62383]: DEBUG oslo_concurrency.lockutils [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.631434] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.631700] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8b8f09b-5116-4ee6-be19-0f14e556b536 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.641333] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.641519] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.642276] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fc67da8-1b65-4678-b3ea-b53c4cbb4171 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.648227] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 975.648227] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52bbf7f7-cba0-2e1c-ab43-543fe59974d5" [ 975.648227] env[62383]: _type = "Task" [ 975.648227] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.656398] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bbf7f7-cba0-2e1c-ab43-543fe59974d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.822822] env[62383]: INFO nova.compute.manager [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Took 31.04 seconds to build instance. [ 975.851578] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452077, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0974} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.851845] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.852649] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881ae677-c1a2-40db-b5c5-e61a0ddf6bc4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.874934] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 9f8e346e-815c-492d-84a9-00ebdca3bcc3/9f8e346e-815c-492d-84a9-00ebdca3bcc3.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.875271] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41a5beb5-1ed1-4ffd-986f-b0b68b0cb1dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.894971] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 975.894971] env[62383]: value = "task-2452078" [ 975.894971] env[62383]: _type = "Task" [ 975.894971] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.903170] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452078, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.933151] env[62383]: DEBUG nova.compute.utils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 975.939211] env[62383]: DEBUG nova.compute.manager [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 975.939211] env[62383]: DEBUG nova.network.neutron [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 975.941651] env[62383]: DEBUG nova.compute.manager [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 975.942064] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 975.942899] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3fa643-3694-4d31-83b0-fec4389e34f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.952806] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 975.953078] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf31df12-b270-4379-a538-dfee87905a15 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.958901] env[62383]: DEBUG oslo_vmware.api [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for the task: (returnval){ [ 975.958901] env[62383]: value = "task-2452079" [ 975.958901] env[62383]: _type = "Task" [ 975.958901] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.968460] env[62383]: DEBUG oslo_vmware.api [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452079, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.986554] env[62383]: DEBUG nova.policy [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2187bc92e81e44a482051be8f9b8e9b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74493a7d4f564707b9d1d9165d953244', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 976.009379] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.164021] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bbf7f7-cba0-2e1c-ab43-543fe59974d5, 'name': SearchDatastore_Task, 'duration_secs': 0.014297} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.165045] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-094637d2-4d93-488c-bbd4-601ffe76049d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.170673] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 976.170673] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]521dfa86-b21c-2ec3-c8f8-382152d9eae1" [ 976.170673] env[62383]: _type = "Task" [ 976.170673] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.178437] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521dfa86-b21c-2ec3-c8f8-382152d9eae1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.268243] env[62383]: DEBUG nova.network.neutron [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Successfully created port: d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 976.324867] env[62383]: DEBUG oslo_concurrency.lockutils [None req-47529c6d-5f0f-402e-be2e-5e2ebae7970c tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "8994780e-1b8f-4464-a303-a1e68206e770" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.567s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.405611] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452078, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.437845] env[62383]: DEBUG nova.compute.manager [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 976.470743] env[62383]: DEBUG oslo_vmware.api [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452079, 'name': PowerOffVM_Task, 'duration_secs': 0.223165} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.471012] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 976.472225] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 976.475475] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58962458-3177-4f0a-a611-5b08a77494b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.538475] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 976.538652] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 976.538972] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Deleting the datastore file [datastore2] 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 976.539078] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a2c3870-b1d7-4410-8664-cb3fcd512d7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.545537] env[62383]: DEBUG oslo_vmware.api [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for the task: (returnval){ [ 976.545537] env[62383]: value = "task-2452081" [ 976.545537] env[62383]: _type = "Task" [ 976.545537] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.556303] env[62383]: DEBUG oslo_vmware.api [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452081, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.685495] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521dfa86-b21c-2ec3-c8f8-382152d9eae1, 'name': SearchDatastore_Task, 'duration_secs': 0.022995} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.685742] env[62383]: DEBUG oslo_concurrency.lockutils [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.686010] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4e5bd3ee-605f-4770-b658-9cbc3d0010ab/4e5bd3ee-605f-4770-b658-9cbc3d0010ab.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.686301] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-656df403-5d6c-437f-b421-3a555733f4c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.695057] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 976.695057] env[62383]: value = "task-2452082" [ 976.695057] env[62383]: _type = "Task" [ 976.695057] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.703059] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452082, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.832945] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1341f525-6e00-48d9-8920-c7fdabad55fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.844156] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef954e59-604e-4e37-be8e-2ad65e0d5d53 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.885023] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69294920-4706-4c1f-996a-173217fdfa9f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.892237] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d16237-599a-4f38-97ff-fef060a1a15d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.905803] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452078, 'name': ReconfigVM_Task, 'duration_secs': 0.810335} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.914219] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 9f8e346e-815c-492d-84a9-00ebdca3bcc3/9f8e346e-815c-492d-84a9-00ebdca3bcc3.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.914703] env[62383]: DEBUG nova.compute.provider_tree [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.916266] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-809a5c72-c4ff-44af-8c17-4ad0e765745d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.925667] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 976.925667] env[62383]: value = "task-2452083" [ 976.925667] env[62383]: _type = "Task" [ 976.925667] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.938044] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452083, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.061019] env[62383]: DEBUG oslo_vmware.api [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Task: {'id': task-2452081, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2388} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.061019] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.061019] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 977.061356] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 977.061356] env[62383]: INFO nova.compute.manager [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 977.061636] env[62383]: DEBUG oslo.service.loopingcall [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.061844] env[62383]: DEBUG nova.compute.manager [-] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 977.061963] env[62383]: DEBUG nova.network.neutron [-] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 977.207445] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452082, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.419448] env[62383]: DEBUG nova.scheduler.client.report [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 977.443824] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452083, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.463323] env[62383]: DEBUG nova.compute.manager [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 977.498527] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 977.499065] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 977.499065] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 977.499282] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] 
Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 977.499358] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 977.499498] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 977.500959] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 977.500959] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 977.500959] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 977.500959] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 977.500959] env[62383]: DEBUG nova.virt.hardware [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 977.501507] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da92053e-d3d4-46bc-8caf-abd19a1f548d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.510292] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89dde146-bce8-480c-8a6f-20d4ec034a37 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.548623] env[62383]: DEBUG nova.compute.manager [req-604d8d65-d49e-4bc6-a957-bdfe72dabc30 req-b6063e25-3efb-473f-9de0-4ee18f2c7112 service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Received event network-vif-deleted-5e45a953-c424-4939-8fc4-b19fe5f54fae {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 
977.548847] env[62383]: INFO nova.compute.manager [req-604d8d65-d49e-4bc6-a957-bdfe72dabc30 req-b6063e25-3efb-473f-9de0-4ee18f2c7112 service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Neutron deleted interface 5e45a953-c424-4939-8fc4-b19fe5f54fae; detaching it from the instance and deleting it from the info cache [ 977.549149] env[62383]: DEBUG nova.network.neutron [req-604d8d65-d49e-4bc6-a957-bdfe72dabc30 req-b6063e25-3efb-473f-9de0-4ee18f2c7112 service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.705833] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452082, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580201} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.706475] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4e5bd3ee-605f-4770-b658-9cbc3d0010ab/4e5bd3ee-605f-4770-b658-9cbc3d0010ab.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 977.706475] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 977.706688] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4821f797-550b-4648-b999-039ea54441bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.715263] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 977.715263] env[62383]: value = "task-2452084" [ 977.715263] env[62383]: _type = "Task" [ 977.715263] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.728767] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452084, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.924516] env[62383]: DEBUG nova.network.neutron [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Successfully updated port: d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 977.929374] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.929374] env[62383]: DEBUG nova.compute.manager [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 977.932981] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.194s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.933992] env[62383]: INFO nova.compute.claims [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 977.944136] env[62383]: DEBUG nova.network.neutron [-] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.951422] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452083, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.052375] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c28e15c-e9d3-479b-9296-b0928f8efc8a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.061988] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809163bb-f64d-42d8-bd4f-5d3a2bade3f2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.094432] env[62383]: DEBUG nova.compute.manager [req-604d8d65-d49e-4bc6-a957-bdfe72dabc30 req-b6063e25-3efb-473f-9de0-4ee18f2c7112 service nova] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Detach interface failed, port_id=5e45a953-c424-4939-8fc4-b19fe5f54fae, reason: Instance 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 978.226726] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452084, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065824} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.226726] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 978.227672] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3732eac-15a1-4f48-86e6-26e8f637673e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.247269] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 4e5bd3ee-605f-4770-b658-9cbc3d0010ab/4e5bd3ee-605f-4770-b658-9cbc3d0010ab.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 978.247494] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce877810-5bc2-462d-87db-e250d524f657 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.267025] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 978.267025] env[62383]: value = "task-2452085" [ 978.267025] env[62383]: _type = "Task" [ 978.267025] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.275756] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452085, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.428248] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquiring lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.428426] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquired lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.428493] env[62383]: DEBUG nova.network.neutron [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 978.446146] env[62383]: DEBUG nova.compute.utils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 978.452015] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452083, 'name': Rename_Task, 'duration_secs': 1.299083} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.452015] env[62383]: INFO nova.compute.manager [-] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Took 1.39 seconds to deallocate network for instance. [ 978.452015] env[62383]: DEBUG nova.compute.manager [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 978.452015] env[62383]: DEBUG nova.network.neutron [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 978.453703] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.456200] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86f404c1-48ed-412b-91ed-3e6e1b38dcdb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.469492] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 978.469492] env[62383]: value = "task-2452086" [ 978.469492] env[62383]: _type = "Task" [ 978.469492] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.477376] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452086, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.511873] env[62383]: DEBUG nova.policy [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e94f486c637c4b9f8c3cfa649688a603', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e320302a6b1e466e887c787006413dec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 978.674752] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "8994780e-1b8f-4464-a303-a1e68206e770" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.674857] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "8994780e-1b8f-4464-a303-a1e68206e770" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.675068] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "8994780e-1b8f-4464-a303-a1e68206e770-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.675256] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "8994780e-1b8f-4464-a303-a1e68206e770-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.675720] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "8994780e-1b8f-4464-a303-a1e68206e770-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.678381] env[62383]: INFO nova.compute.manager [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Terminating instance [ 978.779412] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 
tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452085, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.799645] env[62383]: DEBUG nova.network.neutron [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Successfully created port: 61bdafb3-8c09-454a-af63-5aaacc52947b {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 978.951944] env[62383]: DEBUG nova.compute.manager [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 978.964747] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.974896] env[62383]: DEBUG nova.network.neutron [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 978.983065] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452086, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.153407] env[62383]: DEBUG nova.network.neutron [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updating instance_info_cache with network_info: [{"id": "d094226a-fcbf-4faf-890b-89164713f11f", "address": "fa:16:3e:fb:41:07", "network": {"id": "8afc9f4a-8a5e-4f56-99c4-380df4921c2d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-517038880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74493a7d4f564707b9d1d9165d953244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd094226a-fc", "ovs_interfaceid": "d094226a-fcbf-4faf-890b-89164713f11f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.182884] env[62383]: DEBUG nova.compute.manager [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 979.183116] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 979.186195] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb0521a-b965-4ddc-bb1d-ef0200cdb2f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.194145] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.194406] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ba21000-61e7-4438-bcaf-3b990466a1c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.205850] env[62383]: DEBUG oslo_vmware.api [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 979.205850] env[62383]: value = "task-2452087" [ 979.205850] env[62383]: _type = "Task" [ 979.205850] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.213869] env[62383]: DEBUG oslo_vmware.api [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452087, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.276986] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452085, 'name': ReconfigVM_Task, 'duration_secs': 0.533857} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.277301] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 4e5bd3ee-605f-4770-b658-9cbc3d0010ab/4e5bd3ee-605f-4770-b658-9cbc3d0010ab.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 979.280298] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e161b96-d20d-40cf-b695-b49f8a99244f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.287016] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 979.287016] env[62383]: value = "task-2452088" [ 979.287016] env[62383]: _type = "Task" [ 979.287016] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.297205] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452088, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.318807] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e308fcc-b753-4d6e-a078-5ff6c014126f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.326435] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742ef086-5abc-4f83-92f9-b71c294f4ac7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.359985] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3a4029-5c0e-4b34-a208-ddb64906a421 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.368047] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78264aec-3f5e-4c6d-b796-f5c7868cd8ea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.385484] env[62383]: DEBUG nova.compute.provider_tree [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 979.479473] env[62383]: DEBUG oslo_vmware.api [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452086, 'name': PowerOnVM_Task, 'duration_secs': 0.882724} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.479813] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.480083] env[62383]: INFO nova.compute.manager [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Took 8.54 seconds to spawn the instance on the hypervisor. [ 979.480373] env[62383]: DEBUG nova.compute.manager [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.481527] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0836e37b-cd0b-4e85-b0b3-d10a0de10e97 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.656370] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Releasing lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.657011] env[62383]: DEBUG nova.compute.manager [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Instance network_info: |[{"id": "d094226a-fcbf-4faf-890b-89164713f11f", "address": "fa:16:3e:fb:41:07", "network": {"id": "8afc9f4a-8a5e-4f56-99c4-380df4921c2d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-517038880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74493a7d4f564707b9d1d9165d953244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd094226a-fc", "ovs_interfaceid": "d094226a-fcbf-4faf-890b-89164713f11f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 979.657678] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 
688b0afd-a6e1-4c3f-999d-5975371e888e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:41:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd094226a-fcbf-4faf-890b-89164713f11f', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 979.666404] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Creating folder: Project (74493a7d4f564707b9d1d9165d953244). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 979.667812] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c7a3fcb-cb0b-45e2-a388-0cc8057ccfd1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.671282] env[62383]: DEBUG nova.compute.manager [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Received event network-vif-plugged-d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 979.671553] env[62383]: DEBUG oslo_concurrency.lockutils [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] Acquiring lock "688b0afd-a6e1-4c3f-999d-5975371e888e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.671764] env[62383]: DEBUG oslo_concurrency.lockutils [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] Lock "688b0afd-a6e1-4c3f-999d-5975371e888e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.671962] env[62383]: DEBUG oslo_concurrency.lockutils [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] Lock "688b0afd-a6e1-4c3f-999d-5975371e888e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.672190] env[62383]: DEBUG nova.compute.manager [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] No waiting events found dispatching network-vif-plugged-d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 979.672410] env[62383]: WARNING nova.compute.manager [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Received unexpected event network-vif-plugged-d094226a-fcbf-4faf-890b-89164713f11f for instance with vm_state building and task_state spawning. 
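The repeated "Acquiring lock ... by ...", "acquired ... :: waited" and ""released" ... :: held" DEBUG lines above (for example around InstanceEvents.pop_instance_event.._pop_event and the refresh_cache-* locks) are emitted by oslo.concurrency's lockutils wrappers, as the lockutils.py paths in the trace show, not by Nova itself. The following is a minimal illustrative sketch of the two forms that produce this tracing; the lock names are copied from the log entries above, while the function bodies are placeholders and do not reflect the real Nova code:

    from oslo_concurrency import lockutils

    # Decorator form: wraps the call and logs the acquire / "waited" /
    # "released" / "held" timings, as seen for the per-instance "-events" lock.
    @lockutils.synchronized('688b0afd-a6e1-4c3f-999d-5975371e888e-events')
    def _pop_event():
        pass  # placeholder body, not the actual Nova implementation

    # Context-manager form: used for locks such as "refresh_cache-<instance-uuid>".
    with lockutils.lock('refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e'):
        pass  # placeholder body
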
[ 979.672591] env[62383]: DEBUG nova.compute.manager [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Received event network-changed-d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 979.672788] env[62383]: DEBUG nova.compute.manager [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Refreshing instance network info cache due to event network-changed-d094226a-fcbf-4faf-890b-89164713f11f. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 979.673027] env[62383]: DEBUG oslo_concurrency.lockutils [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] Acquiring lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.673210] env[62383]: DEBUG oslo_concurrency.lockutils [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] Acquired lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.673433] env[62383]: DEBUG nova.network.neutron [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Refreshing network info cache for port d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 979.684590] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Created folder: Project (74493a7d4f564707b9d1d9165d953244) in parent group-v496304. [ 979.688025] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Creating folder: Instances. Parent ref: group-v496552. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 979.688025] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75ff9f16-d6e2-49e9-99a6-ac39d7dec862 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.694582] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Created folder: Instances in parent group-v496552. [ 979.694855] env[62383]: DEBUG oslo.service.loopingcall [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 979.695084] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 979.695324] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c57f858-bfc8-4dae-9282-9e54591b13bc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.720482] env[62383]: DEBUG oslo_vmware.api [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452087, 'name': PowerOffVM_Task, 'duration_secs': 0.183785} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.721822] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 979.722047] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 979.722305] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 979.722305] env[62383]: value = "task-2452091" [ 979.722305] env[62383]: _type = "Task" [ 979.722305] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.722513] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-311636b6-8f11-454d-87d5-601b51af60c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.733570] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452091, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.797204] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 979.797339] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 979.797504] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleting the datastore file [datastore2] 8994780e-1b8f-4464-a303-a1e68206e770 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 979.800674] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61d2e678-038b-4474-8291-24d3b454aa5c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.802499] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452088, 'name': Rename_Task, 'duration_secs': 0.145567} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.802751] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 979.803386] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-241e6021-2b51-4848-ae2f-d70b47f457cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.808196] env[62383]: DEBUG oslo_vmware.api [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for the task: (returnval){ [ 979.808196] env[62383]: value = "task-2452093" [ 979.808196] env[62383]: _type = "Task" [ 979.808196] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.812375] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 979.812375] env[62383]: value = "task-2452094" [ 979.812375] env[62383]: _type = "Task" [ 979.812375] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.818317] env[62383]: DEBUG oslo_vmware.api [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452093, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.823176] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452094, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.889896] env[62383]: DEBUG nova.scheduler.client.report [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.964806] env[62383]: DEBUG nova.compute.manager [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 979.993674] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 979.993922] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.994097] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 979.994283] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.994431] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 979.994578] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 979.994785] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 979.994943] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 979.995246] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 
tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 979.995432] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 979.995608] env[62383]: DEBUG nova.virt.hardware [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 980.000534] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031970a1-fcdc-4b50-8ac9-37c03827b8df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.005470] env[62383]: INFO nova.compute.manager [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Took 29.02 seconds to build instance. [ 980.010138] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8304bae5-1299-439f-8c55-0b130e1f630d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.235917] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452091, 'name': CreateVM_Task, 'duration_secs': 0.357967} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.236193] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 980.236843] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.237064] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.237422] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 980.237705] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e540778e-9d5e-4f88-9559-dcc9a07f9cf2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.244124] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 980.244124] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524ecac9-7d75-d352-5a41-44247ef9a0e4" [ 980.244124] env[62383]: _type = "Task" [ 980.244124] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.254328] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524ecac9-7d75-d352-5a41-44247ef9a0e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.322663] env[62383]: DEBUG oslo_vmware.api [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Task: {'id': task-2452093, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201456} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.325581] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 980.325790] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 980.325971] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 980.326172] env[62383]: INFO nova.compute.manager [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Took 1.14 seconds to destroy the instance on the hypervisor. [ 980.326898] env[62383]: DEBUG oslo.service.loopingcall [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.327399] env[62383]: DEBUG oslo_vmware.api [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452094, 'name': PowerOnVM_Task, 'duration_secs': 0.490393} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.327399] env[62383]: DEBUG nova.compute.manager [-] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 980.327509] env[62383]: DEBUG nova.network.neutron [-] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 980.329015] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 980.329234] env[62383]: DEBUG nova.compute.manager [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 980.329997] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1b7645-1311-4aa3-b5eb-150d1aaabf4e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.394967] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.395788] env[62383]: DEBUG nova.compute.manager [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 980.398600] env[62383]: DEBUG oslo_concurrency.lockutils [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.506s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.398862] env[62383]: DEBUG nova.objects.instance [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lazy-loading 'resources' on Instance uuid 362da311-fa2b-435d-b972-155a3ac22cbb {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 980.467401] env[62383]: DEBUG nova.network.neutron [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updated VIF entry in instance network info cache for port d094226a-fcbf-4faf-890b-89164713f11f. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 980.467851] env[62383]: DEBUG nova.network.neutron [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updating instance_info_cache with network_info: [{"id": "d094226a-fcbf-4faf-890b-89164713f11f", "address": "fa:16:3e:fb:41:07", "network": {"id": "8afc9f4a-8a5e-4f56-99c4-380df4921c2d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-517038880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74493a7d4f564707b9d1d9165d953244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd094226a-fc", "ovs_interfaceid": "d094226a-fcbf-4faf-890b-89164713f11f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.509720] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba484eef-2493-44cc-a777-ba254f1d7b2b tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.530s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.543305] env[62383]: DEBUG nova.network.neutron [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Successfully updated port: 61bdafb3-8c09-454a-af63-5aaacc52947b {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.756446] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524ecac9-7d75-d352-5a41-44247ef9a0e4, 'name': SearchDatastore_Task, 'duration_secs': 0.021652} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.756446] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.756584] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 980.756859] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.757243] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.757386] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 980.757685] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79c0ced4-f8f1-4be5-bf90-3f3cb2c3e63c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.772462] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 980.772699] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 980.773512] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28f57536-1930-4ada-8b30-9938fa731885 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.779434] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 980.779434] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5222a31b-e39c-4124-bfd3-cccc7d32f17d" [ 980.779434] env[62383]: _type = "Task" [ 980.779434] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.787182] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5222a31b-e39c-4124-bfd3-cccc7d32f17d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.852018] env[62383]: DEBUG oslo_concurrency.lockutils [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.902202] env[62383]: DEBUG nova.compute.utils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 980.907746] env[62383]: DEBUG nova.compute.manager [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 980.907746] env[62383]: DEBUG nova.network.neutron [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 980.947846] env[62383]: DEBUG nova.compute.manager [req-614abfc3-672e-4e58-af22-2d2ec85a5ef5 req-3eebfb69-38ef-46f9-9de0-5400c34991fd service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Received event network-changed-92d4aebb-165f-462e-96ea-53a36bc5eae8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 980.947846] env[62383]: DEBUG nova.compute.manager [req-614abfc3-672e-4e58-af22-2d2ec85a5ef5 req-3eebfb69-38ef-46f9-9de0-5400c34991fd service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Refreshing instance network info cache due to event network-changed-92d4aebb-165f-462e-96ea-53a36bc5eae8. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 980.947846] env[62383]: DEBUG oslo_concurrency.lockutils [req-614abfc3-672e-4e58-af22-2d2ec85a5ef5 req-3eebfb69-38ef-46f9-9de0-5400c34991fd service nova] Acquiring lock "refresh_cache-9f8e346e-815c-492d-84a9-00ebdca3bcc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.947846] env[62383]: DEBUG oslo_concurrency.lockutils [req-614abfc3-672e-4e58-af22-2d2ec85a5ef5 req-3eebfb69-38ef-46f9-9de0-5400c34991fd service nova] Acquired lock "refresh_cache-9f8e346e-815c-492d-84a9-00ebdca3bcc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.948609] env[62383]: DEBUG nova.network.neutron [req-614abfc3-672e-4e58-af22-2d2ec85a5ef5 req-3eebfb69-38ef-46f9-9de0-5400c34991fd service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Refreshing network info cache for port 92d4aebb-165f-462e-96ea-53a36bc5eae8 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 980.963621] env[62383]: DEBUG nova.policy [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0452e2520d954675ab3900351cd3296c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1401660f8f64c72be5f9ea6a0960ce3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 980.970944] env[62383]: DEBUG oslo_concurrency.lockutils [req-916e50ab-c377-428c-a617-4586d026ee36 req-5c5f7d21-6adf-49d8-8e65-7af3a92a3532 service nova] Releasing lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.049214] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.049403] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.049565] env[62383]: DEBUG nova.network.neutron [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.073563] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "4e5bd3ee-605f-4770-b658-9cbc3d0010ab" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.074204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "4e5bd3ee-605f-4770-b658-9cbc3d0010ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.074204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "4e5bd3ee-605f-4770-b658-9cbc3d0010ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.074204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "4e5bd3ee-605f-4770-b658-9cbc3d0010ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.074371] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "4e5bd3ee-605f-4770-b658-9cbc3d0010ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.076645] env[62383]: INFO nova.compute.manager [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Terminating instance [ 981.235262] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144a52a0-8eb1-402a-bb90-0c6ab55ba8b9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.243276] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1abe17a-ebad-48d9-b3ed-fca120e6bea9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.274703] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e245beef-4e80-40a7-99a0-6dae88dd6d12 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.285416] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108760bf-8c71-4750-ab37-87a1e8880168 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.294506] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 
tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5222a31b-e39c-4124-bfd3-cccc7d32f17d, 'name': SearchDatastore_Task, 'duration_secs': 0.036974} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.302832] env[62383]: DEBUG nova.compute.provider_tree [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 981.304708] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7437319-561d-4022-b8ec-2d8806ddd1bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.311058] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 981.311058] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522a0098-2fc6-4a32-6729-7ace708925d4" [ 981.311058] env[62383]: _type = "Task" [ 981.311058] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.321838] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522a0098-2fc6-4a32-6729-7ace708925d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.365511] env[62383]: DEBUG nova.network.neutron [-] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.408917] env[62383]: DEBUG nova.compute.manager [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 981.496505] env[62383]: DEBUG nova.network.neutron [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Successfully created port: 81e6feaa-5115-45fc-b1ca-4d39eef4a23e {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 981.579981] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "refresh_cache-4e5bd3ee-605f-4770-b658-9cbc3d0010ab" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.580236] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquired lock "refresh_cache-4e5bd3ee-605f-4770-b658-9cbc3d0010ab" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.580400] env[62383]: DEBUG nova.network.neutron [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.603289] env[62383]: DEBUG nova.network.neutron [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 981.703232] env[62383]: DEBUG nova.compute.manager [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Received event network-vif-plugged-61bdafb3-8c09-454a-af63-5aaacc52947b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 981.703460] env[62383]: DEBUG oslo_concurrency.lockutils [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] Acquiring lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.703669] env[62383]: DEBUG oslo_concurrency.lockutils [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] Lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.703838] env[62383]: DEBUG oslo_concurrency.lockutils [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] Lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.704015] env[62383]: DEBUG nova.compute.manager [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] No waiting events found dispatching network-vif-plugged-61bdafb3-8c09-454a-af63-5aaacc52947b {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 981.704434] env[62383]: WARNING nova.compute.manager [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Received unexpected event network-vif-plugged-61bdafb3-8c09-454a-af63-5aaacc52947b for instance with vm_state building and task_state spawning. [ 981.704607] env[62383]: DEBUG nova.compute.manager [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Received event network-changed-61bdafb3-8c09-454a-af63-5aaacc52947b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 981.704764] env[62383]: DEBUG nova.compute.manager [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Refreshing instance network info cache due to event network-changed-61bdafb3-8c09-454a-af63-5aaacc52947b. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 981.704929] env[62383]: DEBUG oslo_concurrency.lockutils [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] Acquiring lock "refresh_cache-3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.772761] env[62383]: DEBUG nova.network.neutron [req-614abfc3-672e-4e58-af22-2d2ec85a5ef5 req-3eebfb69-38ef-46f9-9de0-5400c34991fd service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Updated VIF entry in instance network info cache for port 92d4aebb-165f-462e-96ea-53a36bc5eae8. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 981.773137] env[62383]: DEBUG nova.network.neutron [req-614abfc3-672e-4e58-af22-2d2ec85a5ef5 req-3eebfb69-38ef-46f9-9de0-5400c34991fd service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Updating instance_info_cache with network_info: [{"id": "92d4aebb-165f-462e-96ea-53a36bc5eae8", "address": "fa:16:3e:dd:82:52", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92d4aebb-16", "ovs_interfaceid": "92d4aebb-165f-462e-96ea-53a36bc5eae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.805859] env[62383]: DEBUG nova.scheduler.client.report [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 981.810060] env[62383]: DEBUG nova.network.neutron [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Updating instance_info_cache with network_info: [{"id": "61bdafb3-8c09-454a-af63-5aaacc52947b", "address": "fa:16:3e:39:7e:34", "network": {"id": 
"da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61bdafb3-8c", "ovs_interfaceid": "61bdafb3-8c09-454a-af63-5aaacc52947b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.830415] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522a0098-2fc6-4a32-6729-7ace708925d4, 'name': SearchDatastore_Task, 'duration_secs': 0.021955} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.830676] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.830924] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 688b0afd-a6e1-4c3f-999d-5975371e888e/688b0afd-a6e1-4c3f-999d-5975371e888e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 981.831201] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62907298-c30a-4f99-99a9-7c6e9fe46125 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.838036] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 981.838036] env[62383]: value = "task-2452095" [ 981.838036] env[62383]: _type = "Task" [ 981.838036] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.848304] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.868345] env[62383]: INFO nova.compute.manager [-] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Took 1.54 seconds to deallocate network for instance. [ 981.936236] env[62383]: DEBUG nova.network.neutron [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Successfully created port: 672a20b2-ffba-4603-8bbf-93199d33d5df {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.109449] env[62383]: DEBUG nova.network.neutron [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 982.165439] env[62383]: DEBUG nova.network.neutron [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.276137] env[62383]: DEBUG oslo_concurrency.lockutils [req-614abfc3-672e-4e58-af22-2d2ec85a5ef5 req-3eebfb69-38ef-46f9-9de0-5400c34991fd service nova] Releasing lock "refresh_cache-9f8e346e-815c-492d-84a9-00ebdca3bcc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.312169] env[62383]: DEBUG oslo_concurrency.lockutils [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.914s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.314923] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.421s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.315219] env[62383]: DEBUG nova.objects.instance [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lazy-loading 'resources' on Instance uuid 9d2e3772-e0b2-450a-9dc8-725c4a05cde4 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.321090] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock 
"refresh_cache-3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.321425] env[62383]: DEBUG nova.compute.manager [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Instance network_info: |[{"id": "61bdafb3-8c09-454a-af63-5aaacc52947b", "address": "fa:16:3e:39:7e:34", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61bdafb3-8c", "ovs_interfaceid": "61bdafb3-8c09-454a-af63-5aaacc52947b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 982.321741] env[62383]: DEBUG oslo_concurrency.lockutils [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] Acquired lock "refresh_cache-3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.321958] env[62383]: DEBUG nova.network.neutron [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Refreshing network info cache for port 61bdafb3-8c09-454a-af63-5aaacc52947b {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 982.323418] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:7e:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '63e45f61-1d9b-4660-8d25-89fb68d45cd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61bdafb3-8c09-454a-af63-5aaacc52947b', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.340756] env[62383]: DEBUG oslo.service.loopingcall [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.342761] env[62383]: INFO nova.scheduler.client.report [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleted allocations for instance 362da311-fa2b-435d-b972-155a3ac22cbb [ 982.350114] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 982.354378] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5ea6ab9-d883-4405-a27d-639a40f95d3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.375913] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.378022] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452095, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449413} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.378022] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 688b0afd-a6e1-4c3f-999d-5975371e888e/688b0afd-a6e1-4c3f-999d-5975371e888e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 982.378429] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 982.378774] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.378774] env[62383]: value = "task-2452096" [ 982.378774] env[62383]: _type = "Task" [ 982.378774] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.379271] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c94ad21a-fe37-4f04-9899-6e9cf93ffb82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.391415] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452096, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.391906] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 982.391906] env[62383]: value = "task-2452097" [ 982.391906] env[62383]: _type = "Task" [ 982.391906] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.401676] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452097, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.420565] env[62383]: DEBUG nova.compute.manager [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 982.455826] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 982.456204] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 982.456342] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 982.456642] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 982.456918] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 982.457218] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 982.457599] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 982.457880] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 982.458283] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 982.458603] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 982.459046] env[62383]: DEBUG nova.virt.hardware [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 982.460138] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d1c8e9-fb6b-4f6b-aeeb-5821416fc687 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.468988] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb39054f-7425-4793-9f2b-01238645bbb3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.668858] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Releasing lock "refresh_cache-4e5bd3ee-605f-4770-b658-9cbc3d0010ab" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.669415] env[62383]: DEBUG nova.compute.manager [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 982.669616] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 982.670558] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350fb506-f19f-42a0-87eb-fa2bac35ad3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.678883] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.679276] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c7f2e95-3b3d-4835-b78d-1663fdd12b02 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.686092] env[62383]: DEBUG oslo_vmware.api [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 982.686092] env[62383]: value = "task-2452098" [ 982.686092] env[62383]: _type = "Task" [ 982.686092] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.694202] env[62383]: DEBUG oslo_vmware.api [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452098, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.874732] env[62383]: DEBUG oslo_concurrency.lockutils [None req-3cd1214c-ae51-4b75-a464-526e63626bd7 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "362da311-fa2b-435d-b972-155a3ac22cbb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.365s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.894350] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452096, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.904661] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142861} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.907284] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 982.908294] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9620387d-3b76-4282-aba3-a77751014791 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.930666] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 688b0afd-a6e1-4c3f-999d-5975371e888e/688b0afd-a6e1-4c3f-999d-5975371e888e.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.935626] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-398c198e-a738-4d18-9dad-1930493ece3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.958033] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 982.958033] env[62383]: value = "task-2452099" [ 982.958033] env[62383]: _type = "Task" [ 982.958033] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.968038] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452099, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.034760] env[62383]: DEBUG nova.compute.manager [req-75120f03-08c1-4286-8b6e-ab8ac54912be req-afd54169-ac73-4f0c-8fa3-a619122d7ab6 service nova] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Received event network-vif-deleted-9a9d7c14-b2f1-4d3b-9357-e46e5e60e52e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 983.085530] env[62383]: DEBUG nova.network.neutron [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Updated VIF entry in instance network info cache for port 61bdafb3-8c09-454a-af63-5aaacc52947b. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 983.085643] env[62383]: DEBUG nova.network.neutron [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Updating instance_info_cache with network_info: [{"id": "61bdafb3-8c09-454a-af63-5aaacc52947b", "address": "fa:16:3e:39:7e:34", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61bdafb3-8c", "ovs_interfaceid": "61bdafb3-8c09-454a-af63-5aaacc52947b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.168984] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b60d2fe-023a-4590-9091-b06dd6c21bd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.178357] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afb452f-81f1-428f-b650-d47a5f96fc3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.219014] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe327fe-ccdc-4f88-8f25-63f61c4fc0d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.229676] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236ff34c-90bc-4c45-85a8-21a52bf8a206 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.233355] env[62383]: DEBUG oslo_vmware.api [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452098, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.245138] env[62383]: DEBUG nova.compute.provider_tree [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.392422] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452096, 'name': CreateVM_Task, 'duration_secs': 0.524577} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.392672] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 983.393286] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.393454] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.393787] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 983.394059] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff17a1fe-3a74-4d13-8d17-8cc165d2f5f5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.398525] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 983.398525] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526d33de-032c-39f7-c387-e9198807c261" [ 983.398525] env[62383]: _type = "Task" [ 983.398525] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.406166] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]526d33de-032c-39f7-c387-e9198807c261, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.466858] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452099, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.589012] env[62383]: DEBUG oslo_concurrency.lockutils [req-14f849c8-3968-46da-b7ab-58a938c0274a req-ffada082-5053-41f9-ac30-84fae32a2307 service nova] Releasing lock "refresh_cache-3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.724466] env[62383]: DEBUG oslo_vmware.api [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452098, 'name': PowerOffVM_Task, 'duration_secs': 0.738066} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.724735] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 983.724904] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.725233] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80bf8eda-f407-4169-9961-b36a6afb044f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.751603] env[62383]: DEBUG nova.scheduler.client.report [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.758399] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 983.758675] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Deleting contents of the VM from datastore 
datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 983.758901] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Deleting the datastore file [datastore2] 4e5bd3ee-605f-4770-b658-9cbc3d0010ab {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 983.759535] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5f0dc77-d439-4929-bd43-e797b26877ef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.765643] env[62383]: DEBUG oslo_vmware.api [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for the task: (returnval){ [ 983.765643] env[62383]: value = "task-2452101" [ 983.765643] env[62383]: _type = "Task" [ 983.765643] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.775622] env[62383]: DEBUG oslo_vmware.api [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452101, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.904069] env[62383]: DEBUG nova.network.neutron [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Successfully updated port: 81e6feaa-5115-45fc-b1ca-4d39eef4a23e {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 983.912034] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]526d33de-032c-39f7-c387-e9198807c261, 'name': SearchDatastore_Task, 'duration_secs': 0.010738} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.912341] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.912713] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.912960] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.913219] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.913563] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.913755] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6faede79-eeee-4be7-885f-530d2d51accc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.922987] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.923200] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.923937] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e2049d7-994a-49b5-a078-c9aced867dee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.929704] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 983.929704] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b6440c-b09e-f3d3-4ba8-ef71c1a32045" [ 983.929704] env[62383]: _type = "Task" [ 983.929704] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.940462] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b6440c-b09e-f3d3-4ba8-ef71c1a32045, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.967487] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452099, 'name': ReconfigVM_Task, 'duration_secs': 0.913671} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.967755] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 688b0afd-a6e1-4c3f-999d-5975371e888e/688b0afd-a6e1-4c3f-999d-5975371e888e.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.968405] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82ee9151-2545-4f67-9a37-9143d1472b7a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.973805] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 983.973805] env[62383]: value = "task-2452102" [ 983.973805] env[62383]: _type = "Task" [ 983.973805] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.981256] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452102, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.261640] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.946s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.263775] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 12.892s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.274596] env[62383]: DEBUG oslo_vmware.api [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Task: {'id': task-2452101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163778} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.274693] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.275041] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 984.275041] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.275154] env[62383]: INFO nova.compute.manager [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Took 1.61 seconds to destroy the instance on the hypervisor. [ 984.275452] env[62383]: DEBUG oslo.service.loopingcall [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.276116] env[62383]: DEBUG nova.compute.manager [-] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 984.276238] env[62383]: DEBUG nova.network.neutron [-] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 984.287919] env[62383]: INFO nova.scheduler.client.report [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Deleted allocations for instance 9d2e3772-e0b2-450a-9dc8-725c4a05cde4 [ 984.295057] env[62383]: DEBUG nova.network.neutron [-] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 984.443823] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b6440c-b09e-f3d3-4ba8-ef71c1a32045, 'name': SearchDatastore_Task, 'duration_secs': 0.036991} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.445075] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f68403f1-caa0-4ddb-8c1d-fe4bec1fb85d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.452122] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 984.452122] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5250d6ce-9719-f2df-2a60-34cb759a3e75" [ 984.452122] env[62383]: _type = "Task" [ 984.452122] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.464263] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5250d6ce-9719-f2df-2a60-34cb759a3e75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.487464] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452102, 'name': Rename_Task, 'duration_secs': 0.293482} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.487889] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 984.488257] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65e8ca70-9f74-48ed-902d-ca8db631364b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.496141] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 984.496141] env[62383]: value = "task-2452103" [ 984.496141] env[62383]: _type = "Task" [ 984.496141] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.509523] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452103, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.768701] env[62383]: INFO nova.compute.claims [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 984.797339] env[62383]: DEBUG nova.network.neutron [-] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.798510] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a38cb26e-c63b-46b4-ad59-cff15f747613 tempest-MultipleCreateTestJSON-1801448780 tempest-MultipleCreateTestJSON-1801448780-project-member] Lock "9d2e3772-e0b2-450a-9dc8-725c4a05cde4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.364s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.964139] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5250d6ce-9719-f2df-2a60-34cb759a3e75, 'name': SearchDatastore_Task, 'duration_secs': 0.010293} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.964427] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.964690] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7/3e868358-2aa2-4ddd-9c2e-16eb5c194bb7.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 984.964957] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2ad1d285-849a-4c9e-84bb-6da4b6100f19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.974570] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 984.974570] env[62383]: value = "task-2452104" [ 984.974570] env[62383]: _type = "Task" [ 984.974570] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.974570] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "c56464dd-63af-4686-b666-d0ac2df01ec1" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.974570] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.974849] env[62383]: INFO nova.compute.manager [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Shelving [ 984.986139] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452104, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.009527] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452103, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.065233] env[62383]: DEBUG nova.compute.manager [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Received event network-vif-plugged-81e6feaa-5115-45fc-b1ca-4d39eef4a23e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 985.065462] env[62383]: DEBUG oslo_concurrency.lockutils [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] Acquiring lock "b451f9ad-cda6-49a3-801e-acbf121e9552-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.065621] env[62383]: DEBUG oslo_concurrency.lockutils [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] Lock "b451f9ad-cda6-49a3-801e-acbf121e9552-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.065767] env[62383]: DEBUG oslo_concurrency.lockutils [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] Lock "b451f9ad-cda6-49a3-801e-acbf121e9552-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.065933] env[62383]: DEBUG nova.compute.manager [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] No waiting events found dispatching network-vif-plugged-81e6feaa-5115-45fc-b1ca-4d39eef4a23e {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 985.066106] env[62383]: WARNING nova.compute.manager [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Received unexpected event network-vif-plugged-81e6feaa-5115-45fc-b1ca-4d39eef4a23e for instance with vm_state building and task_state spawning. [ 985.066269] env[62383]: DEBUG nova.compute.manager [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Received event network-changed-81e6feaa-5115-45fc-b1ca-4d39eef4a23e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 985.066422] env[62383]: DEBUG nova.compute.manager [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Refreshing instance network info cache due to event network-changed-81e6feaa-5115-45fc-b1ca-4d39eef4a23e. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 985.066599] env[62383]: DEBUG oslo_concurrency.lockutils [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] Acquiring lock "refresh_cache-b451f9ad-cda6-49a3-801e-acbf121e9552" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.066735] env[62383]: DEBUG oslo_concurrency.lockutils [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] Acquired lock "refresh_cache-b451f9ad-cda6-49a3-801e-acbf121e9552" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.066890] env[62383]: DEBUG nova.network.neutron [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Refreshing network info cache for port 81e6feaa-5115-45fc-b1ca-4d39eef4a23e {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 985.275659] env[62383]: INFO nova.compute.resource_tracker [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating resource usage from migration 512419a1-769c-4f8b-bdc6-fd406dd76c47 [ 985.300257] env[62383]: INFO nova.compute.manager [-] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Took 1.02 seconds to deallocate network for instance. [ 985.487254] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452104, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.508764] env[62383]: DEBUG oslo_vmware.api [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452103, 'name': PowerOnVM_Task, 'duration_secs': 0.515043} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.511789] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 985.512032] env[62383]: INFO nova.compute.manager [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Took 8.05 seconds to spawn the instance on the hypervisor. 
[ 985.512221] env[62383]: DEBUG nova.compute.manager [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 985.513347] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34b1bda-5312-4960-adfc-bbe27c244bc0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.617784] env[62383]: DEBUG nova.network.neutron [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 985.637483] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88844859-893f-4d79-b3f9-36360f0dc6fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.648632] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9899fc9c-a180-4acc-a362-2c54361365d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.684032] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3834e8-af1c-4e1f-adf8-ddc652f2d3f2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.692257] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c928ab9-1157-454e-a524-918c22fa7ac1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.705728] env[62383]: DEBUG nova.compute.provider_tree [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.795815] env[62383]: DEBUG nova.network.neutron [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.807277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.985153] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452104, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.682373} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.985400] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7/3e868358-2aa2-4ddd-9c2e-16eb5c194bb7.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 985.985619] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 985.985876] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5548dbda-42f8-4860-9bc3-ad52bc142dd1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.992862] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.993163] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 985.993163] env[62383]: value = "task-2452105" [ 985.993163] env[62383]: _type = "Task" [ 985.993163] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.993374] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-114407d7-10eb-4b6d-b57e-7f030fc335ea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.002948] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452105, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.004567] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 986.004567] env[62383]: value = "task-2452106" [ 986.004567] env[62383]: _type = "Task" [ 986.004567] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.013032] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452106, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.038888] env[62383]: INFO nova.compute.manager [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Took 23.58 seconds to build instance. [ 986.132927] env[62383]: DEBUG nova.network.neutron [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Successfully updated port: 672a20b2-ffba-4603-8bbf-93199d33d5df {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 986.210506] env[62383]: DEBUG nova.scheduler.client.report [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 986.298623] env[62383]: DEBUG oslo_concurrency.lockutils [req-70170f9c-429f-4a0f-83de-84e450e6297f req-b426dff0-a7a1-450a-9d18-9b17d66bb0b2 service nova] Releasing lock "refresh_cache-b451f9ad-cda6-49a3-801e-acbf121e9552" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.505343] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452105, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079433} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.505343] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 986.508806] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415343a9-145a-4256-a737-80dc0a2f2876 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.517915] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452106, 'name': PowerOffVM_Task, 'duration_secs': 0.204219} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.527307] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 986.536436] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7/3e868358-2aa2-4ddd-9c2e-16eb5c194bb7.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 986.537233] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04abb07d-0f9c-4174-a399-3f16a86e1cd3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.539861] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3089a03-f484-4f1e-82d9-3a3d60457d52 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.554137] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9d291a4d-2529-4106-9670-4c7c83db8004 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lock "688b0afd-a6e1-4c3f-999d-5975371e888e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.111s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.571048] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880bbcce-0d3a-42b4-bb0e-a96786f9fa19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.573692] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 986.573692] env[62383]: value = "task-2452107" [ 986.573692] env[62383]: _type = "Task" [ 986.573692] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.588467] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452107, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.636212] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "refresh_cache-b451f9ad-cda6-49a3-801e-acbf121e9552" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.636499] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquired lock "refresh_cache-b451f9ad-cda6-49a3-801e-acbf121e9552" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.636742] env[62383]: DEBUG nova.network.neutron [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.716075] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.452s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.716075] env[62383]: INFO nova.compute.manager [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Migrating [ 986.724723] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.458s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.724723] env[62383]: INFO nova.compute.claims [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.086030] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 987.088404] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d57392b3-0f54-4d3b-ace3-c8e5a9036efb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.091586] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452107, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.098330] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 987.098330] env[62383]: value = "task-2452108" [ 987.098330] env[62383]: _type = "Task" [ 987.098330] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.106150] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452108, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.121947] env[62383]: DEBUG nova.compute.manager [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Received event network-vif-plugged-672a20b2-ffba-4603-8bbf-93199d33d5df {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 987.122203] env[62383]: DEBUG oslo_concurrency.lockutils [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] Acquiring lock "b451f9ad-cda6-49a3-801e-acbf121e9552-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.122445] env[62383]: DEBUG oslo_concurrency.lockutils [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] Lock "b451f9ad-cda6-49a3-801e-acbf121e9552-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.122614] env[62383]: DEBUG oslo_concurrency.lockutils [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] Lock "b451f9ad-cda6-49a3-801e-acbf121e9552-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.122787] env[62383]: DEBUG nova.compute.manager [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] No waiting events found dispatching network-vif-plugged-672a20b2-ffba-4603-8bbf-93199d33d5df {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 987.123107] env[62383]: WARNING nova.compute.manager [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Received unexpected event network-vif-plugged-672a20b2-ffba-4603-8bbf-93199d33d5df for instance with vm_state building and task_state spawning. 
[ 987.123505] env[62383]: DEBUG nova.compute.manager [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Received event network-changed-672a20b2-ffba-4603-8bbf-93199d33d5df {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 987.123505] env[62383]: DEBUG nova.compute.manager [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Refreshing instance network info cache due to event network-changed-672a20b2-ffba-4603-8bbf-93199d33d5df. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 987.123641] env[62383]: DEBUG oslo_concurrency.lockutils [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] Acquiring lock "refresh_cache-b451f9ad-cda6-49a3-801e-acbf121e9552" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.184964] env[62383]: DEBUG nova.network.neutron [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.240548] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.240785] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.241422] env[62383]: DEBUG nova.network.neutron [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 987.243216] env[62383]: INFO nova.compute.manager [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Rescuing [ 987.243447] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquiring lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 987.243598] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquired lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.243761] env[62383]: DEBUG nova.network.neutron [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 987.501395] env[62383]: DEBUG nova.network.neutron [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Updating instance_info_cache with network_info: [{"id": "81e6feaa-5115-45fc-b1ca-4d39eef4a23e", "address": "fa:16:3e:63:da:b7", "network": {"id": "91056119-609b-4720-bccd-d0edadf06271", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-376917498", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81e6feaa-51", "ovs_interfaceid": "81e6feaa-5115-45fc-b1ca-4d39eef4a23e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "672a20b2-ffba-4603-8bbf-93199d33d5df", "address": "fa:16:3e:08:fc:c4", "network": {"id": "bbd9de8f-7122-4488-abaf-b0e602ff9f92", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-928016594", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap672a20b2-ff", "ovs_interfaceid": "672a20b2-ffba-4603-8bbf-93199d33d5df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.586659] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452107, 'name': ReconfigVM_Task, 'duration_secs': 0.752218} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.587052] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7/3e868358-2aa2-4ddd-9c2e-16eb5c194bb7.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.587686] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ecf2463-7dec-415d-af07-61167d4829ba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.595309] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 987.595309] env[62383]: value = "task-2452109" [ 987.595309] env[62383]: _type = "Task" [ 987.595309] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.609828] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452109, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.613273] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452108, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.004329] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Releasing lock "refresh_cache-b451f9ad-cda6-49a3-801e-acbf121e9552" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.004815] env[62383]: DEBUG nova.compute.manager [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Instance network_info: |[{"id": "81e6feaa-5115-45fc-b1ca-4d39eef4a23e", "address": "fa:16:3e:63:da:b7", "network": {"id": "91056119-609b-4720-bccd-d0edadf06271", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-376917498", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81e6feaa-51", "ovs_interfaceid": "81e6feaa-5115-45fc-b1ca-4d39eef4a23e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "672a20b2-ffba-4603-8bbf-93199d33d5df", "address": "fa:16:3e:08:fc:c4", "network": {"id": "bbd9de8f-7122-4488-abaf-b0e602ff9f92", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-928016594", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap672a20b2-ff", "ovs_interfaceid": "672a20b2-ffba-4603-8bbf-93199d33d5df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 988.005245] env[62383]: DEBUG oslo_concurrency.lockutils [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] Acquired lock "refresh_cache-b451f9ad-cda6-49a3-801e-acbf121e9552" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.005459] env[62383]: DEBUG nova.network.neutron 
[req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Refreshing network info cache for port 672a20b2-ffba-4603-8bbf-93199d33d5df {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.006855] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:da:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55bd18a7-39a8-4d07-9088-9b944f9ff710', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81e6feaa-5115-45fc-b1ca-4d39eef4a23e', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:fc:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cde23701-02ca-4cb4-b5a6-d321f8ac9660', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '672a20b2-ffba-4603-8bbf-93199d33d5df', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 988.017034] env[62383]: DEBUG oslo.service.loopingcall [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 988.020816] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 988.021557] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a365bad4-cae3-40c5-aeaf-a7edea667395 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.050540] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 988.050540] env[62383]: value = "task-2452110" [ 988.050540] env[62383]: _type = "Task" [ 988.050540] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.062339] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452110, 'name': CreateVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.107887] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452109, 'name': Rename_Task, 'duration_secs': 0.149873} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.114555] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 988.114848] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452108, 'name': CreateSnapshot_Task, 'duration_secs': 0.692921} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.115985] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5214eb55-2810-40ba-aeac-9137be331a80 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.116723] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 988.117445] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9880c44c-2d7b-42c7-be28-83c31612f503 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.123538] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 988.123538] env[62383]: value = "task-2452111" [ 988.123538] env[62383]: _type = "Task" [ 988.123538] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.133602] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f8db90-32ce-4e80-b923-55a9a486a394 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.136932] env[62383]: DEBUG nova.network.neutron [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updating instance_info_cache with network_info: [{"id": "d094226a-fcbf-4faf-890b-89164713f11f", "address": "fa:16:3e:fb:41:07", "network": {"id": "8afc9f4a-8a5e-4f56-99c4-380df4921c2d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-517038880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74493a7d4f564707b9d1d9165d953244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd094226a-fc", "ovs_interfaceid": "d094226a-fcbf-4faf-890b-89164713f11f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.143535] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452111, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.144567] env[62383]: DEBUG nova.network.neutron [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance_info_cache with network_info: [{"id": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "address": "fa:16:3e:7f:55:d3", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba23e44-2c", "ovs_interfaceid": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.148693] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2272b9-da2b-4a3c-8b85-29593472bfde {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.183733] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58637c3b-9371-4573-9087-e55c55bde78c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.192390] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f10f90-84f3-416e-a4ab-aa3db76d80eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.206963] env[62383]: DEBUG nova.compute.provider_tree [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.562256] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452110, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.649836] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 988.651479] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Releasing lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.654235] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452111, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.658830] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.660326] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4ce9d8f6-a308-45db-a640-af4af2189ce1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.676542] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 988.676542] env[62383]: value = "task-2452112" [ 988.676542] env[62383]: _type = "Task" [ 988.676542] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.687161] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452112, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.709771] env[62383]: DEBUG nova.scheduler.client.report [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.816965] env[62383]: DEBUG nova.network.neutron [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Updated VIF entry in instance network info cache for port 672a20b2-ffba-4603-8bbf-93199d33d5df. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.817433] env[62383]: DEBUG nova.network.neutron [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Updating instance_info_cache with network_info: [{"id": "81e6feaa-5115-45fc-b1ca-4d39eef4a23e", "address": "fa:16:3e:63:da:b7", "network": {"id": "91056119-609b-4720-bccd-d0edadf06271", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-376917498", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.34", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55bd18a7-39a8-4d07-9088-9b944f9ff710", "external-id": "nsx-vlan-transportzone-686", "segmentation_id": 686, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81e6feaa-51", "ovs_interfaceid": "81e6feaa-5115-45fc-b1ca-4d39eef4a23e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "672a20b2-ffba-4603-8bbf-93199d33d5df", "address": "fa:16:3e:08:fc:c4", "network": {"id": "bbd9de8f-7122-4488-abaf-b0e602ff9f92", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-928016594", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "e1401660f8f64c72be5f9ea6a0960ce3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cde23701-02ca-4cb4-b5a6-d321f8ac9660", "external-id": "nsx-vlan-transportzone-586", "segmentation_id": 586, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap672a20b2-ff", "ovs_interfaceid": "672a20b2-ffba-4603-8bbf-93199d33d5df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.060932] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452110, 'name': CreateVM_Task, 'duration_secs': 0.63079} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.061794] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 989.062596] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.062830] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.063193] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 989.063505] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bad2e7af-ff5b-49c9-9fb5-8792a143eea8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.068600] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 989.068600] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]521fdcb7-c6f0-fbbf-5f0f-02a769c5c0b6" [ 989.068600] env[62383]: _type = "Task" [ 989.068600] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.078199] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521fdcb7-c6f0-fbbf-5f0f-02a769c5c0b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.139286] env[62383]: DEBUG oslo_vmware.api [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452111, 'name': PowerOnVM_Task, 'duration_secs': 0.895977} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.139558] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 989.139817] env[62383]: INFO nova.compute.manager [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Took 9.17 seconds to spawn the instance on the hypervisor. [ 989.140139] env[62383]: DEBUG nova.compute.manager [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 989.141707] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69524f7-3d58-45b0-8b66-09f91836a8a6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.184077] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452112, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.215863] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.493s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.216559] env[62383]: DEBUG nova.compute.manager [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 989.220790] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.800s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.221089] env[62383]: DEBUG nova.objects.instance [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lazy-loading 'resources' on Instance uuid c2fee51e-3cc9-421c-bfe5-b324a5b14197 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 989.324623] env[62383]: DEBUG oslo_concurrency.lockutils [req-f5fb279b-8528-44e4-a4ad-63b93e297682 req-19fc1b4f-06be-4a7c-94eb-421d1ced4db6 service nova] Releasing lock "refresh_cache-b451f9ad-cda6-49a3-801e-acbf121e9552" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.581440] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521fdcb7-c6f0-fbbf-5f0f-02a769c5c0b6, 'name': SearchDatastore_Task, 'duration_secs': 0.021191} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.581769] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.582015] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 989.582260] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.582407] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.582590] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Creating directory 
with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.582866] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-930212fa-a495-4620-afaf-6559ddf64624 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.592350] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.592553] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 989.593710] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e4c928f-4323-42fe-8ce6-b34af47e758d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.599219] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 989.599219] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524e2f15-018a-3fb1-da1b-d0bbf07f0931" [ 989.599219] env[62383]: _type = "Task" [ 989.599219] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.607543] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524e2f15-018a-3fb1-da1b-d0bbf07f0931, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.667013] env[62383]: INFO nova.compute.manager [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Took 24.81 seconds to build instance. [ 989.688356] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452112, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.722630] env[62383]: DEBUG nova.compute.utils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 989.725034] env[62383]: DEBUG nova.compute.manager [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 989.725034] env[62383]: DEBUG nova.network.neutron [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 989.835447] env[62383]: DEBUG nova.policy [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db7e9998210e485fa855f0375f63ad55', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35016a724e7e4fa2b0fc19396d8e736b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 990.116237] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524e2f15-018a-3fb1-da1b-d0bbf07f0931, 'name': SearchDatastore_Task, 'duration_secs': 0.010441} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.120906] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cdac52d-2bfd-4623-839a-ad906d7a370c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.129297] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 990.129297] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528ff1cc-8513-18f8-cfde-b8434d09f8b4" [ 990.129297] env[62383]: _type = "Task" [ 990.129297] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.146737] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528ff1cc-8513-18f8-cfde-b8434d09f8b4, 'name': SearchDatastore_Task, 'duration_secs': 0.009741} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.146737] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.146737] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] b451f9ad-cda6-49a3-801e-acbf121e9552/b451f9ad-cda6-49a3-801e-acbf121e9552.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 990.146737] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35935b07-0d50-408a-98a5-3d6d7a7f9224 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.154751] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 990.154751] env[62383]: value = "task-2452113" [ 990.154751] env[62383]: _type = "Task" [ 990.154751] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.161987] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd697f1-fd45-4ddd-9e39-42f7fc24d1c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.170260] env[62383]: DEBUG oslo_concurrency.lockutils [None req-792a5f12-20c1-4ccd-8b37-96043775b68b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.326s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.170527] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452113, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.173330] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb846dcf-292d-4ec7-8251-698b70081daf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.211898] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 990.212451] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452112, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.213390] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf77651b-8e3e-4a0f-a329-473762b55a9d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.215602] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd4e161-1d44-4c77-83a7-c48fdb1a54cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.218470] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de89462-9d8b-4cae-8038-f6a575241710 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.248411] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7610683-6bb9-48fe-a14d-f815ce1ebdb4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.253098] env[62383]: DEBUG nova.compute.manager [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 990.255838] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance 'eedc7859-3882-4837-9419-f9edce5f12fa' progress to 0 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 990.259853] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 990.259853] env[62383]: value = "task-2452114" [ 990.259853] env[62383]: _type = "Task" [ 990.259853] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.279253] env[62383]: DEBUG nova.compute.provider_tree [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.285978] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452114, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.673050] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452113, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.688758] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452112, 'name': CloneVM_Task, 'duration_secs': 1.788624} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.689092] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Created linked-clone VM from snapshot [ 990.689962] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa682f0-b062-4260-bee3-092cbd16def3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.700270] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Uploading image 83286fd2-a028-4799-8ed9-fae62546d213 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 990.735683] env[62383]: DEBUG oslo_vmware.rw_handles [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 990.735683] env[62383]: value = "vm-496558" [ 990.735683] env[62383]: _type = "VirtualMachine" [ 990.735683] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 990.736102] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ca473589-ce81-4cfe-9681-37390d5cdfbb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.744547] env[62383]: DEBUG oslo_vmware.rw_handles [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lease: (returnval){ [ 990.744547] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c4c4ae-5c3f-b21f-e2fd-fb15dccf3a68" [ 990.744547] env[62383]: _type = "HttpNfcLease" [ 990.744547] env[62383]: } obtained for exporting VM: (result){ [ 990.744547] env[62383]: value = "vm-496558" [ 990.744547] env[62383]: _type = "VirtualMachine" [ 990.744547] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 990.744867] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the lease: (returnval){ [ 990.744867] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c4c4ae-5c3f-b21f-e2fd-fb15dccf3a68" [ 990.744867] env[62383]: _type = "HttpNfcLease" [ 990.744867] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 990.751753] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 990.751753] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c4c4ae-5c3f-b21f-e2fd-fb15dccf3a68" [ 990.751753] env[62383]: _type = "HttpNfcLease" [ 990.751753] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 990.768119] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 990.772887] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c0de6bb-923d-4094-8861-320175f93f7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.779134] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452114, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.780498] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 990.780498] env[62383]: value = "task-2452116" [ 990.780498] env[62383]: _type = "Task" [ 990.780498] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.784196] env[62383]: DEBUG nova.scheduler.client.report [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.793616] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452116, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.908818] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquiring lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.909053] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.019564] env[62383]: DEBUG nova.network.neutron [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Successfully created port: 3d90ef19-0bb2-425b-929c-29a31ceac068 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.165523] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452113, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65012} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.165786] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] b451f9ad-cda6-49a3-801e-acbf121e9552/b451f9ad-cda6-49a3-801e-acbf121e9552.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 991.165997] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 991.166275] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d920921-afa6-4c7f-b767-73db0e76740f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.174235] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 991.174235] env[62383]: value = "task-2452117" [ 991.174235] env[62383]: _type = "Task" [ 991.174235] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.184282] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452117, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.252645] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 991.252645] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c4c4ae-5c3f-b21f-e2fd-fb15dccf3a68" [ 991.252645] env[62383]: _type = "HttpNfcLease" [ 991.252645] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 991.252933] env[62383]: DEBUG oslo_vmware.rw_handles [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 991.252933] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c4c4ae-5c3f-b21f-e2fd-fb15dccf3a68" [ 991.252933] env[62383]: _type = "HttpNfcLease" [ 991.252933] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 991.253673] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fd3e2d4-66f6-4831-aec5-b233b545c75f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.261449] env[62383]: DEBUG oslo_vmware.rw_handles [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9ebe-c9ed-d1d3-1c6e-17f17a23cb1e/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 991.263130] env[62383]: DEBUG oslo_vmware.rw_handles [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9ebe-c9ed-d1d3-1c6e-17f17a23cb1e/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 991.320385] env[62383]: DEBUG nova.compute.manager [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 991.323050] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.102s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.338200] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.898s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.342900] env[62383]: INFO nova.compute.claims [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 991.352175] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452114, 'name': PowerOffVM_Task, 'duration_secs': 0.825003} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.358848] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 991.359712] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.359712] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 991.359712] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.359997] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 991.359997] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 991.360391] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 991.360545] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 991.360791] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 
tempest-ServersTestJSON-715409334-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 991.360926] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 991.361366] env[62383]: DEBUG nova.virt.hardware [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 991.362793] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.362793] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452116, 'name': PowerOffVM_Task, 'duration_secs': 0.272016} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.363194] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8875edc-3558-4cec-be43-815b107df359 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.371017] env[62383]: INFO nova.scheduler.client.report [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Deleted allocations for instance c2fee51e-3cc9-421c-bfe5-b324a5b14197 [ 991.371017] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c844c2-3aab-497a-8254-422aa581330d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.371859] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 991.372103] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance 'eedc7859-3882-4837-9419-f9edce5f12fa' progress to 17 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 991.398853] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-aa9da3fe-6c12-4ded-948b-0423ba776987 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.402412] env[62383]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45b3a5e-0feb-44e8-bd58-6558eae91d73 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.406864] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e508af-0eb4-4543-b545-d3dcb26b0ffd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.411813] env[62383]: DEBUG nova.compute.manager [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 991.470158] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.470882] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2ce2289-361c-44dc-82d7-fdb9f7cf1e1f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.478551] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 991.478551] env[62383]: value = "task-2452118" [ 991.478551] env[62383]: _type = "Task" [ 991.478551] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.488646] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 991.488998] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 991.489309] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.489653] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.489653] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.489930] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69ffb23a-49a1-4ace-acb9-a9b2f56f6064 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.499699] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.499935] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 991.500826] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cd6c44c-5eef-43d9-97d8-ad1e27c6dd04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.508850] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 991.508850] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5253033a-f8d6-1975-58d7-ec8cbe82b26d" [ 991.508850] env[62383]: _type = "Task" [ 991.508850] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.511445] env[62383]: DEBUG nova.compute.manager [req-ae43c55f-de21-4d0b-ba53-806c201a2a70 req-57131698-3e54-4999-a84e-96e1efeb3855 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Received event network-changed-61bdafb3-8c09-454a-af63-5aaacc52947b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 991.511722] env[62383]: DEBUG nova.compute.manager [req-ae43c55f-de21-4d0b-ba53-806c201a2a70 req-57131698-3e54-4999-a84e-96e1efeb3855 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Refreshing instance network info cache due to event network-changed-61bdafb3-8c09-454a-af63-5aaacc52947b. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 991.512047] env[62383]: DEBUG oslo_concurrency.lockutils [req-ae43c55f-de21-4d0b-ba53-806c201a2a70 req-57131698-3e54-4999-a84e-96e1efeb3855 service nova] Acquiring lock "refresh_cache-3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.512132] env[62383]: DEBUG oslo_concurrency.lockutils [req-ae43c55f-de21-4d0b-ba53-806c201a2a70 req-57131698-3e54-4999-a84e-96e1efeb3855 service nova] Acquired lock "refresh_cache-3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.512326] env[62383]: DEBUG nova.network.neutron [req-ae43c55f-de21-4d0b-ba53-806c201a2a70 req-57131698-3e54-4999-a84e-96e1efeb3855 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Refreshing network info cache for port 61bdafb3-8c09-454a-af63-5aaacc52947b {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 991.523888] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5253033a-f8d6-1975-58d7-ec8cbe82b26d, 'name': SearchDatastore_Task, 'duration_secs': 0.011857} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.525345] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a87b21ac-54d9-4694-aa2c-798c88174e78 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.531382] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 991.531382] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e84ca3-b902-01cd-d774-9e0a30a2c4a9" [ 991.531382] env[62383]: _type = "Task" [ 991.531382] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.540053] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e84ca3-b902-01cd-d774-9e0a30a2c4a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.691042] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452117, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16358} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.691042] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 991.691042] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978edeb8-37f7-412b-a317-fb02abcc9cf4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.721122] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] b451f9ad-cda6-49a3-801e-acbf121e9552/b451f9ad-cda6-49a3-801e-acbf121e9552.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 991.721500] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c0bdd02-7ade-468d-9061-695d6c56377c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.742683] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 991.742683] env[62383]: value = "task-2452119" [ 991.742683] env[62383]: _type = "Task" [ 991.742683] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.750859] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452119, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.880904] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 991.881704] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.881704] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 991.881704] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.882456] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 991.882504] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 991.882821] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 991.882895] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 
tempest-ServerActionsTestOtherB-255557430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 991.883068] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 991.883330] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 991.883537] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 991.891110] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4023c6d4-9f1a-42d2-813c-2680aae1829d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.903293] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f2822dfe-e45f-4d9a-b1b7-b3ccd37e8b50 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "c2fee51e-3cc9-421c-bfe5-b324a5b14197" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.189s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.910241] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 991.910241] env[62383]: value = "task-2452120" [ 991.910241] env[62383]: _type = "Task" [ 991.910241] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.921719] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452120, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.942588] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.043833] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e84ca3-b902-01cd-d774-9e0a30a2c4a9, 'name': SearchDatastore_Task, 'duration_secs': 0.012621} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.044312] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.045119] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 688b0afd-a6e1-4c3f-999d-5975371e888e/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. {{(pid=62383) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 992.045626] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-668a854b-084f-4863-ab1f-f5ed5602d329 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.056807] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 992.056807] env[62383]: value = "task-2452121" [ 992.056807] env[62383]: _type = "Task" [ 992.056807] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.066836] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.253982] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452119, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.259158] env[62383]: DEBUG nova.network.neutron [req-ae43c55f-de21-4d0b-ba53-806c201a2a70 req-57131698-3e54-4999-a84e-96e1efeb3855 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Updated VIF entry in instance network info cache for port 61bdafb3-8c09-454a-af63-5aaacc52947b. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 992.259529] env[62383]: DEBUG nova.network.neutron [req-ae43c55f-de21-4d0b-ba53-806c201a2a70 req-57131698-3e54-4999-a84e-96e1efeb3855 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Updating instance_info_cache with network_info: [{"id": "61bdafb3-8c09-454a-af63-5aaacc52947b", "address": "fa:16:3e:39:7e:34", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61bdafb3-8c", "ovs_interfaceid": "61bdafb3-8c09-454a-af63-5aaacc52947b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.424671] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452120, 'name': ReconfigVM_Task, 'duration_secs': 0.37223} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.425024] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance 'eedc7859-3882-4837-9419-f9edce5f12fa' progress to 33 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 992.500503] env[62383]: DEBUG oslo_concurrency.lockutils [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "152567ba-f24c-4674-b06e-98c76a3da324" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.500638] env[62383]: DEBUG oslo_concurrency.lockutils [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "152567ba-f24c-4674-b06e-98c76a3da324" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.501026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "152567ba-f24c-4674-b06e-98c76a3da324-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.502144] env[62383]: DEBUG oslo_concurrency.lockutils [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "152567ba-f24c-4674-b06e-98c76a3da324-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.502144] env[62383]: DEBUG oslo_concurrency.lockutils [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "152567ba-f24c-4674-b06e-98c76a3da324-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.505563] env[62383]: INFO nova.compute.manager [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Terminating instance [ 992.577657] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452121, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.753323] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452119, 'name': ReconfigVM_Task, 'duration_secs': 0.691467} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.754804] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Reconfigured VM instance instance-0000005f to attach disk [datastore1] b451f9ad-cda6-49a3-801e-acbf121e9552/b451f9ad-cda6-49a3-801e-acbf121e9552.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 992.755880] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34888022-06b8-49a6-8c67-3eaf065ea2da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.758614] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6afa5dc9-1b41-433b-beaa-1a427d73e040 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.762247] env[62383]: DEBUG oslo_concurrency.lockutils [req-ae43c55f-de21-4d0b-ba53-806c201a2a70 req-57131698-3e54-4999-a84e-96e1efeb3855 service nova] Releasing lock "refresh_cache-3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.766300] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01215f78-a1ea-410e-826a-b86f401abedd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.769708] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 992.769708] env[62383]: value = "task-2452122" [ 992.769708] env[62383]: _type = "Task" [ 992.769708] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.803056] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47fbc74-1c69-4ef1-8de5-296cdc36fa45 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.806452] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452122, 'name': Rename_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.811494] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f2d0eb-41a1-4846-a1a8-df8bc93b8690 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.827228] env[62383]: DEBUG nova.compute.provider_tree [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.934568] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 992.934971] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.935153] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 992.935351] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.935589] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 992.935805] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 992.936095] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 992.936375] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 992.936618] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 992.936909] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 992.937098] env[62383]: DEBUG nova.virt.hardware [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 992.942463] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfiguring VM instance instance-00000048 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 992.942761] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05f8b2e1-fc5c-4eb6-b135-a4d47a4c9156 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.957316] env[62383]: DEBUG nova.network.neutron [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Successfully updated port: 3d90ef19-0bb2-425b-929c-29a31ceac068 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 992.961591] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 992.961591] env[62383]: value = "task-2452123" [ 992.961591] env[62383]: _type = "Task" [ 992.961591] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.969894] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452123, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.010872] env[62383]: DEBUG nova.compute.manager [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 993.011593] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 993.012719] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd348ace-85c7-434f-80b0-145521d95a2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.020493] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 993.020745] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ca6fcaf-54af-469b-9beb-42b15ca829b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.026393] env[62383]: DEBUG oslo_vmware.api [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 993.026393] env[62383]: value = "task-2452124" [ 993.026393] env[62383]: _type = "Task" [ 993.026393] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.034760] env[62383]: DEBUG oslo_vmware.api [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2452124, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.067748] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452121, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6804} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.068113] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 688b0afd-a6e1-4c3f-999d-5975371e888e/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk. 
[ 993.069019] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d7fabd-d6f6-45ce-956b-4fdbadaf06ee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.094647] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 688b0afd-a6e1-4c3f-999d-5975371e888e/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.095167] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cf1aeb0-c696-4d75-949e-91cbc3ff632f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.113654] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 993.113654] env[62383]: value = "task-2452125" [ 993.113654] env[62383]: _type = "Task" [ 993.113654] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.122118] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452125, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.281321] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452122, 'name': Rename_Task, 'duration_secs': 0.165247} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.281782] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 993.282729] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7e4ced9-1e01-4aa0-8abf-c181b9a1587e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.288525] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 993.288525] env[62383]: value = "task-2452126" [ 993.288525] env[62383]: _type = "Task" [ 993.288525] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.297250] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452126, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.330868] env[62383]: DEBUG nova.scheduler.client.report [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 993.460350] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "refresh_cache-1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.460517] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "refresh_cache-1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.460658] env[62383]: DEBUG nova.network.neutron [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.471693] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452123, 'name': ReconfigVM_Task, 'duration_secs': 0.242255} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.472733] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfigured VM instance instance-00000048 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 993.473650] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d335e3f-bc89-4956-b100-db235228532d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.504795] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] eedc7859-3882-4837-9419-f9edce5f12fa/eedc7859-3882-4837-9419-f9edce5f12fa.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.505571] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07040d15-4da6-44ef-abbc-38ea4bcec490 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.526300] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 993.526300] env[62383]: value = "task-2452127" [ 993.526300] env[62383]: _type = "Task" [ 993.526300] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.539034] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452127, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.542426] env[62383]: DEBUG oslo_vmware.api [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2452124, 'name': PowerOffVM_Task, 'duration_secs': 0.200439} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.542426] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 993.542426] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 993.542426] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f425416b-2a91-4fc1-a515-c4fa1c18789c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.545463] env[62383]: DEBUG nova.compute.manager [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Received event network-vif-plugged-3d90ef19-0bb2-425b-929c-29a31ceac068 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 993.545754] env[62383]: DEBUG oslo_concurrency.lockutils [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] Acquiring lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 993.546050] env[62383]: DEBUG oslo_concurrency.lockutils [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] Lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.546278] env[62383]: DEBUG oslo_concurrency.lockutils [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] Lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.546507] env[62383]: DEBUG nova.compute.manager [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] No waiting events found dispatching network-vif-plugged-3d90ef19-0bb2-425b-929c-29a31ceac068 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 993.546677] env[62383]: WARNING nova.compute.manager [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Received unexpected event network-vif-plugged-3d90ef19-0bb2-425b-929c-29a31ceac068 for instance with vm_state building and task_state spawning. 
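The paired "Acquiring lock ... / Lock ... acquired ... waited 0.000s / ... released ... held 0.000s" DEBUG lines above (for example around pop_instance_event and the refresh_cache-* locks) come from oslo_concurrency's lock helpers, whose wrapper function is the "inner" seen in the lockutils.py paths. A minimal sketch of that usage pattern follows; the decorated function body is invented for illustration, only the lockutils calls themselves are real.

from oslo_concurrency import lockutils

@lockutils.synchronized('1919c4ae-0e30-42bf-b851-2e6c24ab1ae3-events')
def _pop_event():
    # Runs while the in-process lock is held; the decorator's wrapper logs
    # the acquire/held/released timings at DEBUG, as in the lines above.
    return None

# Equivalent context-manager form, e.g. for a refresh_cache-<instance> lock:
with lockutils.lock('refresh_cache-1919c4ae-0e30-42bf-b851-2e6c24ab1ae3'):
    pass  # the network info cache would be rebuilt here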
[ 993.546955] env[62383]: DEBUG nova.compute.manager [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Received event network-changed-3d90ef19-0bb2-425b-929c-29a31ceac068 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 993.547142] env[62383]: DEBUG nova.compute.manager [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Refreshing instance network info cache due to event network-changed-3d90ef19-0bb2-425b-929c-29a31ceac068. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 993.547359] env[62383]: DEBUG oslo_concurrency.lockutils [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] Acquiring lock "refresh_cache-1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.613719] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 993.614655] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 993.615366] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Deleting the datastore file [datastore2] 152567ba-f24c-4674-b06e-98c76a3da324 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 993.615366] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-909bd9af-1c1b-492d-af4a-b29b5a96d903 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.626228] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452125, 'name': ReconfigVM_Task, 'duration_secs': 0.384773} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.627606] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 688b0afd-a6e1-4c3f-999d-5975371e888e/cac3b430-a1d5-4ad1-92ec-34c2261779a8-rescue.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 993.628821] env[62383]: DEBUG oslo_vmware.api [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for the task: (returnval){ [ 993.628821] env[62383]: value = "task-2452129" [ 993.628821] env[62383]: _type = "Task" [ 993.628821] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.629080] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8db037-8a5e-4527-8fc4-4f6433508232 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.640416] env[62383]: DEBUG oslo_vmware.api [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2452129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.662756] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8a42e1a-b047-4ab0-9021-fe49e270f554 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.679649] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 993.679649] env[62383]: value = "task-2452130" [ 993.679649] env[62383]: _type = "Task" [ 993.679649] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.688962] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452130, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.800059] env[62383]: DEBUG oslo_vmware.api [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452126, 'name': PowerOnVM_Task, 'duration_secs': 0.509815} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.800059] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.800059] env[62383]: INFO nova.compute.manager [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Took 11.38 seconds to spawn the instance on the hypervisor. [ 993.800558] env[62383]: DEBUG nova.compute.manager [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 993.801097] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec884ca9-8ca0-41e5-ac61-c41d5bd685bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.835827] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.498s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.836536] env[62383]: DEBUG nova.compute.manager [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 993.840638] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.831s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.842785] env[62383]: INFO nova.compute.claims [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 994.006139] env[62383]: DEBUG nova.network.neutron [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 994.037287] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452127, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.144694] env[62383]: DEBUG oslo_vmware.api [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Task: {'id': task-2452129, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288054} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.144959] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 994.145165] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 994.145353] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 994.145530] env[62383]: INFO nova.compute.manager [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Took 1.13 seconds to destroy the instance on the hypervisor. [ 994.145772] env[62383]: DEBUG oslo.service.loopingcall [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 994.145966] env[62383]: DEBUG nova.compute.manager [-] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 994.146074] env[62383]: DEBUG nova.network.neutron [-] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 994.173770] env[62383]: DEBUG nova.network.neutron [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Updating instance_info_cache with network_info: [{"id": "3d90ef19-0bb2-425b-929c-29a31ceac068", "address": "fa:16:3e:36:75:2b", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d90ef19-0b", "ovs_interfaceid": "3d90ef19-0bb2-425b-929c-29a31ceac068", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.189906] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452130, 'name': ReconfigVM_Task, 'duration_secs': 0.21783} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.191178] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.191579] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97e5161d-b00d-4cab-90fd-edb6e7ea979e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.198584] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 994.198584] env[62383]: value = "task-2452131" [ 994.198584] env[62383]: _type = "Task" [ 994.198584] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.207585] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452131, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.322021] env[62383]: INFO nova.compute.manager [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Took 28.61 seconds to build instance. [ 994.348584] env[62383]: DEBUG nova.compute.utils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 994.352427] env[62383]: DEBUG nova.compute.manager [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 994.353027] env[62383]: DEBUG nova.network.neutron [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 994.442145] env[62383]: DEBUG nova.policy [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc391aae95a8405bab7801175514ac8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c15955328966463fa09401a270d95fe0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 994.536180] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452127, 'name': ReconfigVM_Task, 'duration_secs': 0.573016} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.537504] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfigured VM instance instance-00000048 to attach disk [datastore2] eedc7859-3882-4837-9419-f9edce5f12fa/eedc7859-3882-4837-9419-f9edce5f12fa.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.537809] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance 'eedc7859-3882-4837-9419-f9edce5f12fa' progress to 50 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 994.542147] env[62383]: DEBUG nova.compute.manager [req-d5adb01c-8bae-4df5-94f4-b852bf890efb req-50170917-4cf6-44de-91b9-0fe62e2d0dbc service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Received event network-vif-deleted-3c2cbd45-1a44-495a-bfe1-6e6f90985ded {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 994.542344] env[62383]: INFO nova.compute.manager [req-d5adb01c-8bae-4df5-94f4-b852bf890efb req-50170917-4cf6-44de-91b9-0fe62e2d0dbc service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Neutron deleted interface 3c2cbd45-1a44-495a-bfe1-6e6f90985ded; detaching it from the instance and deleting it from the info cache [ 994.542518] env[62383]: DEBUG nova.network.neutron [req-d5adb01c-8bae-4df5-94f4-b852bf890efb req-50170917-4cf6-44de-91b9-0fe62e2d0dbc service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.678609] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "refresh_cache-1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.678609] env[62383]: DEBUG nova.compute.manager [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Instance network_info: |[{"id": "3d90ef19-0bb2-425b-929c-29a31ceac068", "address": "fa:16:3e:36:75:2b", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": 
"nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d90ef19-0b", "ovs_interfaceid": "3d90ef19-0bb2-425b-929c-29a31ceac068", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 994.678609] env[62383]: DEBUG oslo_concurrency.lockutils [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] Acquired lock "refresh_cache-1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.678609] env[62383]: DEBUG nova.network.neutron [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Refreshing network info cache for port 3d90ef19-0bb2-425b-929c-29a31ceac068 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.678609] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:75:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca83c3bc-f3ec-42ab-85b3-192512f766f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d90ef19-0bb2-425b-929c-29a31ceac068', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.686481] env[62383]: DEBUG oslo.service.loopingcall [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 994.686829] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 994.687079] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d85fa4b8-e6e7-40b0-9d5d-50a7d4a9031c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.713812] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452131, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.715409] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.715409] env[62383]: value = "task-2452132" [ 994.715409] env[62383]: _type = "Task" [ 994.715409] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.723833] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452132, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.824521] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f419fbb3-680d-435f-b2e5-8845c43674a8 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "b451f9ad-cda6-49a3-801e-acbf121e9552" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.119s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.855945] env[62383]: DEBUG nova.compute.manager [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 994.884389] env[62383]: DEBUG nova.network.neutron [-] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.898019] env[62383]: DEBUG nova.network.neutron [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Successfully created port: a3f189ed-023b-4eb5-b181-dab2eff70488 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 995.047528] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14997870-7bbb-4569-ade0-e63a064ebbe4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.050915] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-319746e9-59bb-4f81-8c64-5f892dd6d678 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.074391] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d11ef7-9a51-41ae-acd1-a1574e55dce5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.079942] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ba6a80-4f9b-4a67-9fdd-171e1b4922da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.112664] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance 'eedc7859-3882-4837-9419-f9edce5f12fa' progress to 67 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 995.133186] env[62383]: DEBUG nova.compute.manager [req-d5adb01c-8bae-4df5-94f4-b852bf890efb req-50170917-4cf6-44de-91b9-0fe62e2d0dbc service nova] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Detach interface failed, port_id=3c2cbd45-1a44-495a-bfe1-6e6f90985ded, reason: Instance 152567ba-f24c-4674-b06e-98c76a3da324 could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 995.216297] env[62383]: DEBUG oslo_vmware.api [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452131, 'name': PowerOnVM_Task, 'duration_secs': 0.629911} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.219768] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.224824] env[62383]: DEBUG nova.compute.manager [None req-4822064b-0d6c-4908-92f7-48dc69c8c916 tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.226295] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30acb07-45ec-49c2-91e2-5b050f02e95e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.234591] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452132, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.259805] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb6d9c4-af4c-457f-9127-4895d686fea1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.267833] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d67286b-c215-4697-be3f-e74a9e367a22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.306988] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56aa17f3-16ab-445a-82ca-9972bfbaddb6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.314864] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92d143b-e0a5-42bf-bf96-850f28734893 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.328395] env[62383]: DEBUG nova.compute.provider_tree [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.366792] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "b451f9ad-cda6-49a3-801e-acbf121e9552" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.367070] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "b451f9ad-cda6-49a3-801e-acbf121e9552" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.367299] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "b451f9ad-cda6-49a3-801e-acbf121e9552-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.367566] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "b451f9ad-cda6-49a3-801e-acbf121e9552-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 995.367717] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "b451f9ad-cda6-49a3-801e-acbf121e9552-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.369626] env[62383]: INFO nova.compute.manager [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Terminating instance [ 995.386565] env[62383]: INFO nova.compute.manager [-] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Took 1.24 seconds to deallocate network for instance. [ 995.445222] env[62383]: DEBUG nova.network.neutron [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Updated VIF entry in instance network info cache for port 3d90ef19-0bb2-425b-929c-29a31ceac068. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.445669] env[62383]: DEBUG nova.network.neutron [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Updating instance_info_cache with network_info: [{"id": "3d90ef19-0bb2-425b-929c-29a31ceac068", "address": "fa:16:3e:36:75:2b", "network": {"id": "a03fb7c0-a824-47f2-aa0f-5dd0a8fbc5d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1016387797-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35016a724e7e4fa2b0fc19396d8e736b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca83c3bc-f3ec-42ab-85b3-192512f766f3", "external-id": "nsx-vlan-transportzone-879", "segmentation_id": 879, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d90ef19-0b", "ovs_interfaceid": "3d90ef19-0bb2-425b-929c-29a31ceac068", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.727529] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452132, 'name': CreateVM_Task, 'duration_secs': 0.532055} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.730182] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.731162] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.731382] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.731927] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 995.732134] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-970bda08-a7c3-45bb-9b14-a5b42c84f589 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.737880] env[62383]: 
DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 995.737880] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5257e843-17df-1f2d-f6e9-ba1837e5a092" [ 995.737880] env[62383]: _type = "Task" [ 995.737880] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.748217] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5257e843-17df-1f2d-f6e9-ba1837e5a092, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.834047] env[62383]: DEBUG nova.scheduler.client.report [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.868329] env[62383]: DEBUG nova.compute.manager [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 995.872998] env[62383]: DEBUG nova.compute.manager [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 995.873237] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 995.874070] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8181a0-f86d-469d-8091-9e699b5b38e7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.882024] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.882024] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbdfb36d-de08-46ba-98a0-aeef31af2530 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.888873] env[62383]: DEBUG oslo_vmware.api [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 995.888873] env[62383]: value = "task-2452133" [ 995.888873] env[62383]: _type = "Task" [ 995.888873] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.893877] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 995.894123] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.894283] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 995.894472] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 
tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.894617] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 995.894761] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 995.894964] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 995.895139] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 995.895311] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 995.895471] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 995.895640] env[62383]: DEBUG nova.virt.hardware [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 995.896531] env[62383]: DEBUG oslo_concurrency.lockutils [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.897317] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc949d0-6ccc-45f6-a34f-0a2e19f1a0b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.905303] env[62383]: DEBUG oslo_vmware.api [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452133, 'name': PowerOffVM_Task} progress 
is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.908632] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a38911b-2f57-4f0e-8033-c61bc7068c1e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.948642] env[62383]: DEBUG oslo_concurrency.lockutils [req-f7314143-4612-4f68-a7f2-4eccd5180546 req-d9962cf1-b82c-4948-9528-0830e9ce38c0 service nova] Releasing lock "refresh_cache-1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.250912] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5257e843-17df-1f2d-f6e9-ba1837e5a092, 'name': SearchDatastore_Task, 'duration_secs': 0.016677} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.250912] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.251172] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.251579] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.251788] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.252029] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 996.252340] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-649dd393-c02a-4c1a-8ff4-21e75fb0d30a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.261939] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 
tempest-ServersTestJSON-715409334-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 996.262268] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 996.263241] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3662f0db-21d4-43f4-aeb9-244a7adff9c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.268974] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 996.268974] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5216d2bd-7da6-8440-52fa-9876e5d4a504" [ 996.268974] env[62383]: _type = "Task" [ 996.268974] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.278654] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5216d2bd-7da6-8440-52fa-9876e5d4a504, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.339551] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.340193] env[62383]: DEBUG nova.compute.manager [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 996.347021] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.379s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.347021] env[62383]: DEBUG nova.objects.instance [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lazy-loading 'resources' on Instance uuid 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.399951] env[62383]: DEBUG oslo_vmware.api [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452133, 'name': PowerOffVM_Task, 'duration_secs': 0.287865} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.400260] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.400429] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 996.400684] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ce080b3-77ca-4928-8ed0-8aa8c43034cb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.501903] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 996.502090] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 996.502325] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Deleting the datastore file [datastore1] b451f9ad-cda6-49a3-801e-acbf121e9552 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 996.502992] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40dc3996-afe1-43c5-9251-af4066da238b {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.512317] env[62383]: DEBUG oslo_vmware.api [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for the task: (returnval){ [ 996.512317] env[62383]: value = "task-2452135" [ 996.512317] env[62383]: _type = "Task" [ 996.512317] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.520528] env[62383]: DEBUG oslo_vmware.api [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452135, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.598678] env[62383]: DEBUG nova.compute.manager [req-6dd610a9-0e8a-44b0-9bc2-afd50ac3c4de req-c1fb6a0a-a925-4cf5-bc4c-6672ea121f15 service nova] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Received event network-vif-plugged-a3f189ed-023b-4eb5-b181-dab2eff70488 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 996.599050] env[62383]: DEBUG oslo_concurrency.lockutils [req-6dd610a9-0e8a-44b0-9bc2-afd50ac3c4de req-c1fb6a0a-a925-4cf5-bc4c-6672ea121f15 service nova] Acquiring lock "4d929f43-cea2-41a0-9822-180a2647be2c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.599211] env[62383]: DEBUG oslo_concurrency.lockutils [req-6dd610a9-0e8a-44b0-9bc2-afd50ac3c4de req-c1fb6a0a-a925-4cf5-bc4c-6672ea121f15 service nova] Lock "4d929f43-cea2-41a0-9822-180a2647be2c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.599426] env[62383]: DEBUG oslo_concurrency.lockutils [req-6dd610a9-0e8a-44b0-9bc2-afd50ac3c4de req-c1fb6a0a-a925-4cf5-bc4c-6672ea121f15 service nova] Lock "4d929f43-cea2-41a0-9822-180a2647be2c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.599600] env[62383]: DEBUG nova.compute.manager [req-6dd610a9-0e8a-44b0-9bc2-afd50ac3c4de req-c1fb6a0a-a925-4cf5-bc4c-6672ea121f15 service nova] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] No waiting events found dispatching network-vif-plugged-a3f189ed-023b-4eb5-b181-dab2eff70488 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 996.599766] env[62383]: WARNING nova.compute.manager [req-6dd610a9-0e8a-44b0-9bc2-afd50ac3c4de req-c1fb6a0a-a925-4cf5-bc4c-6672ea121f15 service nova] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Received unexpected event network-vif-plugged-a3f189ed-023b-4eb5-b181-dab2eff70488 for instance with vm_state building and task_state spawning. [ 996.793355] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5216d2bd-7da6-8440-52fa-9876e5d4a504, 'name': SearchDatastore_Task, 'duration_secs': 0.010947} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.795533] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c413d6f-5b41-489d-a832-0378d70e0557 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.809019] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 996.809019] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52693fc5-1407-8e08-c68c-674cbb46b61c" [ 996.809019] env[62383]: _type = "Task" [ 996.809019] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.816542] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52693fc5-1407-8e08-c68c-674cbb46b61c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.820558] env[62383]: DEBUG nova.network.neutron [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Successfully updated port: a3f189ed-023b-4eb5-b181-dab2eff70488 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 996.850464] env[62383]: DEBUG nova.compute.utils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 996.853365] env[62383]: DEBUG nova.compute.manager [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 996.854443] env[62383]: DEBUG nova.network.neutron [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 996.856657] env[62383]: DEBUG nova.network.neutron [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Port 6ba23e44-2c77-442d-9aee-5a75d8abab68 binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 996.946672] env[62383]: DEBUG nova.policy [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36584e4a6b9542918e45e11370c6cfbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b395bdf2df794b32a117f93fa4887c8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 997.023438] env[62383]: DEBUG oslo_vmware.api [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Task: {'id': task-2452135, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224736} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.029998] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 997.029998] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 997.029998] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 997.029998] env[62383]: INFO nova.compute.manager [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Took 1.15 seconds to destroy the instance on the hypervisor. 
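The PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow the oslo_vmware wait_for_task/_poll_task pattern these lines reference: vCenter returns a task object, which the caller polls until it reports success or an error, logging "progress is N%." on each poll and "completed successfully." at the end. A minimal, self-contained sketch of that poll-until-complete loop is below; the poll_task helper and the TaskInfo dictionary shape are simplified assumptions for illustration only, not oslo.vmware's actual API.

    import time

    class TaskError(Exception):
        """Raised when the remote task reports an error state."""

    def poll_task(get_task_info, interval=0.5, timeout=300):
        # get_task_info() is a hypothetical callable returning a dict like
        # {'name': 'PowerOffVM_Task', 'state': 'running', 'progress': 0},
        # mirroring the fields the log prints for task-2452133/task-2452135.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info['state'] == 'success':
                return info                      # logged as "completed successfully."
            if info['state'] == 'error':
                raise TaskError(info.get('error', 'task failed'))
            # still running: logged as "progress is N%." by _poll_task
            time.sleep(interval)
        raise TimeoutError('task did not complete within %ss' % timeout)

In the log this loop shows up as one or more "progress is N%." lines from _poll_task followed by a single "completed successfully." line with the task's duration_secs once vCenter marks the task done.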
[ 997.029998] env[62383]: DEBUG oslo.service.loopingcall [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 997.029998] env[62383]: DEBUG nova.compute.manager [-] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 997.029998] env[62383]: DEBUG nova.network.neutron [-] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 997.236175] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2c576a-e1b4-42e8-b849-716c42210100 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.245221] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e857c738-b916-4026-8125-0d3381f052d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.278104] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b4fc42-6a0d-4eef-a8fb-9b40bcfa82df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.285832] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0981f541-9748-4f0d-acc3-fcabb2ed9d14 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.300074] env[62383]: DEBUG nova.compute.provider_tree [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.316050] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52693fc5-1407-8e08-c68c-674cbb46b61c, 'name': SearchDatastore_Task, 'duration_secs': 0.016841} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.316328] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.316593] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3/1919c4ae-0e30-42bf-b851-2e6c24ab1ae3.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 997.316854] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20900438-0f0b-4fc5-8ad2-2b797fd06165 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.324065] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "refresh_cache-4d929f43-cea2-41a0-9822-180a2647be2c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.324222] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "refresh_cache-4d929f43-cea2-41a0-9822-180a2647be2c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.324406] env[62383]: DEBUG nova.network.neutron [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.325829] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 997.325829] env[62383]: value = "task-2452136" [ 997.325829] env[62383]: _type = "Task" [ 997.325829] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.340781] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452136, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.353172] env[62383]: DEBUG nova.compute.manager [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 997.804654] env[62383]: DEBUG nova.scheduler.client.report [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 997.845556] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452136, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.879168] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "eedc7859-3882-4837-9419-f9edce5f12fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.879168] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.879518] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.888347] env[62383]: DEBUG nova.network.neutron [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 997.958467] env[62383]: DEBUG nova.network.neutron [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Successfully created port: 275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 998.036410] env[62383]: DEBUG nova.network.neutron [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Updating instance_info_cache with network_info: [{"id": "a3f189ed-023b-4eb5-b181-dab2eff70488", "address": "fa:16:3e:ff:d1:41", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f189ed-02", "ovs_interfaceid": "a3f189ed-023b-4eb5-b181-dab2eff70488", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.152624] env[62383]: DEBUG nova.network.neutron [-] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.249583] env[62383]: DEBUG nova.compute.manager [req-6dd993c3-6081-40c1-82c2-090bbff7cc2c req-bac18178-40db-472b-9e02-97fe9f66a8bd service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Received event network-vif-deleted-81e6feaa-5115-45fc-b1ca-4d39eef4a23e {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 998.314209] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.971s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.316880] env[62383]: DEBUG oslo_concurrency.lockutils [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.467s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.317518] env[62383]: 
DEBUG nova.objects.instance [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 998.341844] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452136, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.741868} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.342292] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3/1919c4ae-0e30-42bf-b851-2e6c24ab1ae3.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 998.342636] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 998.342983] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0be66a97-df26-48ae-b671-73d837ec2696 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.357323] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 998.357323] env[62383]: value = "task-2452137" [ 998.357323] env[62383]: _type = "Task" [ 998.357323] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.357323] env[62383]: INFO nova.scheduler.client.report [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Deleted allocations for instance 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d [ 998.376430] env[62383]: DEBUG nova.compute.manager [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 998.379854] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452137, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.421499] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 998.421618] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 998.421844] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 998.422106] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 998.422318] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 998.422509] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 998.422771] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 998.423044] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 998.423228] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 998.423463] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 998.423675] env[62383]: DEBUG nova.virt.hardware [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 998.424977] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d83e0e5-8d08-472f-89b8-fe787b74d4bd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.435195] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f483c0-0606-4310-83a3-97d1e2350e68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.540053] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "refresh_cache-4d929f43-cea2-41a0-9822-180a2647be2c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.540053] env[62383]: DEBUG nova.compute.manager [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Instance network_info: |[{"id": "a3f189ed-023b-4eb5-b181-dab2eff70488", "address": "fa:16:3e:ff:d1:41", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f189ed-02", "ovs_interfaceid": "a3f189ed-023b-4eb5-b181-dab2eff70488", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 998.540621] 
env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:d1:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3f189ed-023b-4eb5-b181-dab2eff70488', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 998.548629] env[62383]: DEBUG oslo.service.loopingcall [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.548973] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 998.549739] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03df1a96-1d6a-48b2-a7fa-a87cdee4bec1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.570088] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 998.570088] env[62383]: value = "task-2452138" [ 998.570088] env[62383]: _type = "Task" [ 998.570088] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.578190] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452138, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.656305] env[62383]: INFO nova.compute.manager [-] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Took 1.63 seconds to deallocate network for instance. [ 998.869971] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090273} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.875444] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 998.875444] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7520d0-1350-4b15-a3b3-7ba147ada0d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.878525] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cafc38bd-b070-48ca-ae27-af0c1348c66c tempest-InstanceActionsNegativeTestJSON-202625021 tempest-InstanceActionsNegativeTestJSON-202625021-project-member] Lock "4cfea58a-35cc-4e3f-8f39-0bc00968eb4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.446s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.904982] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3/1919c4ae-0e30-42bf-b851-2e6c24ab1ae3.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.906086] env[62383]: DEBUG oslo_concurrency.lockutils [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "interface-4cd9c7be-c5f4-460b-a9e2-e8f778076947-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.906379] env[62383]: DEBUG oslo_concurrency.lockutils [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-4cd9c7be-c5f4-460b-a9e2-e8f778076947-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.906733] env[62383]: DEBUG nova.objects.instance [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'flavor' on Instance uuid 4cd9c7be-c5f4-460b-a9e2-e8f778076947 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.908278] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2558c614-e65a-482b-97ad-f1c4b403d660 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.934832] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 998.934832] env[62383]: value = "task-2452139" [ 998.934832] 
env[62383]: _type = "Task" [ 998.934832] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.943651] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452139, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.968651] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.968879] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.969158] env[62383]: DEBUG nova.network.neutron [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 999.081683] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452138, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.140750] env[62383]: DEBUG nova.compute.manager [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Received event network-changed-a3f189ed-023b-4eb5-b181-dab2eff70488 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 999.140994] env[62383]: DEBUG nova.compute.manager [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Refreshing instance network info cache due to event network-changed-a3f189ed-023b-4eb5-b181-dab2eff70488. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 999.144107] env[62383]: DEBUG oslo_concurrency.lockutils [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] Acquiring lock "refresh_cache-4d929f43-cea2-41a0-9822-180a2647be2c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.144107] env[62383]: DEBUG oslo_concurrency.lockutils [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] Acquired lock "refresh_cache-4d929f43-cea2-41a0-9822-180a2647be2c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.144107] env[62383]: DEBUG nova.network.neutron [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Refreshing network info cache for port a3f189ed-023b-4eb5-b181-dab2eff70488 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.164191] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.329471] env[62383]: DEBUG oslo_concurrency.lockutils [None req-10a353f9-c42a-4c6b-8b91-b652c59f5b01 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.334025] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.955s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.334025] env[62383]: DEBUG nova.objects.instance [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lazy-loading 'resources' on Instance uuid 8994780e-1b8f-4464-a303-a1e68206e770 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.446378] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452139, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.583149] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452138, 'name': CreateVM_Task, 'duration_secs': 0.540982} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.583149] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 999.583149] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.583149] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.583149] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 999.583585] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f2b05e7-29de-48b8-bbb0-d75a24aec178 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.588150] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 999.588150] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c67b3f-012c-9521-45a7-25d0e7d45fdf" [ 999.588150] env[62383]: _type = "Task" [ 999.588150] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.596488] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c67b3f-012c-9521-45a7-25d0e7d45fdf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.598269] env[62383]: DEBUG nova.objects.instance [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'pci_requests' on Instance uuid 4cd9c7be-c5f4-460b-a9e2-e8f778076947 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.875587] env[62383]: DEBUG nova.network.neutron [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance_info_cache with network_info: [{"id": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "address": "fa:16:3e:7f:55:d3", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba23e44-2c", "ovs_interfaceid": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.949871] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452139, 'name': ReconfigVM_Task, 'duration_secs': 0.55694} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.951287] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3/1919c4ae-0e30-42bf-b851-2e6c24ab1ae3.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.952495] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-875c0a8b-3054-46e7-9792-c1ccfe255704 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.961157] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 999.961157] env[62383]: value = "task-2452140" [ 999.961157] env[62383]: _type = "Task" [ 999.961157] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.976333] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452140, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.007397] env[62383]: DEBUG nova.network.neutron [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Successfully updated port: 275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1000.024629] env[62383]: DEBUG nova.network.neutron [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Updated VIF entry in instance network info cache for port a3f189ed-023b-4eb5-b181-dab2eff70488. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.024986] env[62383]: DEBUG nova.network.neutron [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Updating instance_info_cache with network_info: [{"id": "a3f189ed-023b-4eb5-b181-dab2eff70488", "address": "fa:16:3e:ff:d1:41", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3f189ed-02", "ovs_interfaceid": "a3f189ed-023b-4eb5-b181-dab2eff70488", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.101531] env[62383]: DEBUG nova.objects.base [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Object Instance<4cd9c7be-c5f4-460b-a9e2-e8f778076947> lazy-loaded attributes: flavor,pci_requests {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1000.101783] env[62383]: DEBUG nova.network.neutron [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1000.103671] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c67b3f-012c-9521-45a7-25d0e7d45fdf, 'name': SearchDatastore_Task, 'duration_secs': 0.017467} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.103977] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.104711] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1000.104711] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.104711] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.104838] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.105381] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-825a53b0-c6d7-450a-a375-39d7850ba1ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.118438] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.118536] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1000.119630] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-555a811a-8ee6-46fd-a123-d962ba3ebd61 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.128872] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1000.128872] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5209ec8b-77e3-e5c3-20be-909b19033d10" [ 1000.128872] env[62383]: _type = "Task" [ 1000.128872] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.137408] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5209ec8b-77e3-e5c3-20be-909b19033d10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.152841] env[62383]: DEBUG nova.policy [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7825487398fc47b5aa690bed357e4448', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba559fb5da01474791c2408ca92bbff6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1000.187396] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54c1ebf-d14c-4af2-a809-9a94e2785eba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.196713] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9a3f7f-e3e1-4b3a-a5f8-3f528bf3bca3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.231358] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6688aa8e-bcd4-4934-b499-0856d7b25c77 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.238937] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de416fa-02a4-4cdd-b166-d8511ddbe0a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.252827] env[62383]: DEBUG nova.compute.provider_tree [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.379463] env[62383]: DEBUG oslo_concurrency.lockutils [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.475845] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452140, 'name': Rename_Task, 'duration_secs': 0.22345} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.476749] env[62383]: DEBUG nova.network.neutron [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Successfully created port: fe300b59-d7d9-40d6-b3a4-feabee4c56a3 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1000.478824] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1000.479137] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-441715e3-0be6-4929-88ba-c5eaebab5774 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.486897] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 1000.486897] env[62383]: value = "task-2452141" [ 1000.486897] env[62383]: _type = "Task" [ 1000.486897] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.495804] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452141, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.514314] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.514623] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.514770] env[62383]: DEBUG nova.network.neutron [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1000.530561] env[62383]: DEBUG oslo_concurrency.lockutils [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] Releasing lock "refresh_cache-4d929f43-cea2-41a0-9822-180a2647be2c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.530955] env[62383]: DEBUG nova.compute.manager [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Received event network-changed-d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1000.531171] env[62383]: DEBUG nova.compute.manager [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Refreshing instance network info cache due to event network-changed-d094226a-fcbf-4faf-890b-89164713f11f. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1000.531410] env[62383]: DEBUG oslo_concurrency.lockutils [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] Acquiring lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.531797] env[62383]: DEBUG oslo_concurrency.lockutils [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] Acquired lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.531797] env[62383]: DEBUG nova.network.neutron [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Refreshing network info cache for port d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1000.640505] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5209ec8b-77e3-e5c3-20be-909b19033d10, 'name': SearchDatastore_Task, 'duration_secs': 0.014319} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.643982] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-492b0bba-913b-4cf6-a5e2-a58337c739eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.647179] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1000.647179] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d5bf49-004b-ae8a-af94-cdcd4f11c35e" [ 1000.647179] env[62383]: _type = "Task" [ 1000.647179] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.655596] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d5bf49-004b-ae8a-af94-cdcd4f11c35e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.755747] env[62383]: DEBUG nova.scheduler.client.report [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1000.889648] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af31679-3a35-41e2-9ffa-c1a7171e6fd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.897428] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20effc85-e627-4b6f-9a52-5eb930e07fca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.997609] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452141, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.061377] env[62383]: DEBUG nova.network.neutron [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1001.162364] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d5bf49-004b-ae8a-af94-cdcd4f11c35e, 'name': SearchDatastore_Task, 'duration_secs': 0.014051} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.162634] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.162889] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4d929f43-cea2-41a0-9822-180a2647be2c/4d929f43-cea2-41a0-9822-180a2647be2c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1001.163169] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ccdcec7-c7f2-4f0f-8c26-46d42cce2d62 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.169713] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1001.169713] env[62383]: value = "task-2452142" [ 1001.169713] env[62383]: _type = "Task" [ 1001.169713] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.178283] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452142, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.227590] env[62383]: DEBUG nova.compute.manager [req-99282ebe-174e-44ea-b673-ba720e9fc94e req-d9c1cb7b-db04-491a-ae91-64aede99a571 service nova] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Received event network-vif-deleted-672a20b2-ffba-4603-8bbf-93199d33d5df {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1001.237301] env[62383]: DEBUG nova.network.neutron [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating instance_info_cache with network_info: [{"id": "275a086a-5096-4414-8397-af9ac5331f87", "address": "fa:16:3e:2b:a3:17", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap275a086a-50", "ovs_interfaceid": "275a086a-5096-4414-8397-af9ac5331f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.250940] env[62383]: DEBUG nova.network.neutron [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updated VIF entry in instance network info cache for port d094226a-fcbf-4faf-890b-89164713f11f. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1001.251467] env[62383]: DEBUG nova.network.neutron [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updating instance_info_cache with network_info: [{"id": "d094226a-fcbf-4faf-890b-89164713f11f", "address": "fa:16:3e:fb:41:07", "network": {"id": "8afc9f4a-8a5e-4f56-99c4-380df4921c2d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-517038880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74493a7d4f564707b9d1d9165d953244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd094226a-fc", "ovs_interfaceid": "d094226a-fcbf-4faf-890b-89164713f11f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.262510] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.932s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.265908] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.458s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.265908] env[62383]: DEBUG nova.objects.instance [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lazy-loading 'resources' on Instance uuid 4e5bd3ee-605f-4770-b658-9cbc3d0010ab {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.292048] env[62383]: INFO nova.scheduler.client.report [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Deleted allocations for instance 8994780e-1b8f-4464-a303-a1e68206e770 [ 1001.393933] env[62383]: DEBUG oslo_vmware.rw_handles [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9ebe-c9ed-d1d3-1c6e-17f17a23cb1e/disk-0.vmdk. 
{{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1001.394921] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c983e6cc-f6a0-4aad-9a1d-2fe93f6221dd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.401509] env[62383]: DEBUG oslo_vmware.rw_handles [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9ebe-c9ed-d1d3-1c6e-17f17a23cb1e/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1001.401674] env[62383]: ERROR oslo_vmware.rw_handles [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9ebe-c9ed-d1d3-1c6e-17f17a23cb1e/disk-0.vmdk due to incomplete transfer. [ 1001.401891] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e2283429-a41c-467f-8957-913d08097e90 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.408983] env[62383]: DEBUG oslo_vmware.rw_handles [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527f9ebe-c9ed-d1d3-1c6e-17f17a23cb1e/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1001.409234] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Uploaded image 83286fd2-a028-4799-8ed9-fae62546d213 to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1001.411750] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1001.412325] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6a447272-6946-463b-a974-7fe00b0ca552 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.419120] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1001.419120] env[62383]: value = "task-2452143" [ 1001.419120] env[62383]: _type = "Task" [ 1001.419120] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.428062] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452143, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.501335] env[62383]: DEBUG oslo_vmware.api [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452141, 'name': PowerOnVM_Task, 'duration_secs': 0.82809} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.501653] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.501879] env[62383]: INFO nova.compute.manager [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Took 10.18 seconds to spawn the instance on the hypervisor. [ 1001.502096] env[62383]: DEBUG nova.compute.manager [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.502956] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03728892-48b1-4bd5-946b-ec5341093132 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.680524] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452142, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.741786] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.741786] env[62383]: DEBUG nova.compute.manager [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Instance network_info: |[{"id": "275a086a-5096-4414-8397-af9ac5331f87", "address": "fa:16:3e:2b:a3:17", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap275a086a-50", "ovs_interfaceid": "275a086a-5096-4414-8397-af9ac5331f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1001.741786] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:a3:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7908211b-df93-467b-87a8-3c3d29b03de6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '275a086a-5096-4414-8397-af9ac5331f87', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.754104] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating folder: Project (b395bdf2df794b32a117f93fa4887c8e). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1001.755065] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46aa6596-2dec-4c55-8e3f-0742158be2fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.758160] env[62383]: DEBUG oslo_concurrency.lockutils [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] Releasing lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.758498] env[62383]: DEBUG nova.compute.manager [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Received event network-changed-d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1001.758859] env[62383]: DEBUG nova.compute.manager [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Refreshing instance network info cache due to event network-changed-d094226a-fcbf-4faf-890b-89164713f11f. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1001.759054] env[62383]: DEBUG oslo_concurrency.lockutils [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] Acquiring lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.759278] env[62383]: DEBUG oslo_concurrency.lockutils [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] Acquired lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.759517] env[62383]: DEBUG nova.network.neutron [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Refreshing network info cache for port d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1001.774070] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Created folder: Project (b395bdf2df794b32a117f93fa4887c8e) in parent group-v496304. [ 1001.774282] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating folder: Instances. Parent ref: group-v496561. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1001.774533] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41d0b0fe-4ac9-4626-99b5-024dc0fb6f57 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.784247] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Created folder: Instances in parent group-v496561. [ 1001.784622] env[62383]: DEBUG oslo.service.loopingcall [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.784720] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1001.784898] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ddc9298-6bc9-4028-a1f9-b1e8909122c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.806372] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9537fb80-8541-4d86-af36-94f352297e8d tempest-ServerDiskConfigTestJSON-396945699 tempest-ServerDiskConfigTestJSON-396945699-project-member] Lock "8994780e-1b8f-4464-a303-a1e68206e770" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.131s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.813506] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.813506] env[62383]: value = "task-2452146" [ 1001.813506] env[62383]: _type = "Task" [ 1001.813506] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.822757] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452146, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.929481] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452143, 'name': Destroy_Task} progress is 33%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.009532] env[62383]: DEBUG nova.network.neutron [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Successfully updated port: fe300b59-d7d9-40d6-b3a4-feabee4c56a3 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.020388] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c41687-ca07-4179-ab4c-7959719f56a7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.028106] env[62383]: INFO nova.compute.manager [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Took 29.79 seconds to build instance. [ 1002.050717] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3d2317-9b8b-4e8e-b83c-08421fad4498 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.061093] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance 'eedc7859-3882-4837-9419-f9edce5f12fa' progress to 83 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1002.105054] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1223cd7-b28f-4bc9-8913-334ce13f4b07 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.112319] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72910431-72ac-4608-9259-4463f1f4f186 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.144743] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8839d2-a9ba-45c4-affd-97bbe0a8bf35 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.152972] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8f7d16-84a8-4f1b-a921-ecf687e76884 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.167829] env[62383]: DEBUG nova.compute.provider_tree [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.178656] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452142, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643214} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.179473] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 4d929f43-cea2-41a0-9822-180a2647be2c/4d929f43-cea2-41a0-9822-180a2647be2c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1002.179686] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1002.179930] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46cbb96e-8b52-4959-ab23-c5cf75784881 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.185632] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1002.185632] env[62383]: value = "task-2452147" [ 1002.185632] env[62383]: _type = "Task" [ 1002.185632] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.193396] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452147, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.263466] env[62383]: DEBUG nova.compute.manager [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received event network-vif-plugged-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1002.263741] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] Acquiring lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.263974] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.264198] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.264411] env[62383]: DEBUG nova.compute.manager [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] No waiting events found dispatching network-vif-plugged-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1002.264596] env[62383]: WARNING nova.compute.manager [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received unexpected event network-vif-plugged-275a086a-5096-4414-8397-af9ac5331f87 for instance with vm_state building and task_state spawning. [ 1002.264780] env[62383]: DEBUG nova.compute.manager [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received event network-changed-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1002.264978] env[62383]: DEBUG nova.compute.manager [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Refreshing instance network info cache due to event network-changed-275a086a-5096-4414-8397-af9ac5331f87. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1002.265223] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] Acquiring lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.265403] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] Acquired lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.265855] env[62383]: DEBUG nova.network.neutron [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Refreshing network info cache for port 275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1002.325019] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452146, 'name': CreateVM_Task, 'duration_secs': 0.366436} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.325019] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1002.325019] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.325019] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.325581] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1002.325935] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e95fafb-39a3-4766-9a2b-9b66848594ef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.330889] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1002.330889] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5235aa6b-27b0-f83c-92b4-162afad2edf9" [ 1002.330889] env[62383]: _type = "Task" [ 1002.330889] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.341216] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5235aa6b-27b0-f83c-92b4-162afad2edf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.430346] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452143, 'name': Destroy_Task, 'duration_secs': 0.609075} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.431578] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Destroyed the VM [ 1002.431578] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1002.431578] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7b23fe3b-e3bf-495a-8f48-654bcbf42d04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.438285] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1002.438285] env[62383]: value = "task-2452148" [ 1002.438285] env[62383]: _type = "Task" [ 1002.438285] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.455733] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452148, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.485916] env[62383]: DEBUG nova.network.neutron [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updated VIF entry in instance network info cache for port d094226a-fcbf-4faf-890b-89164713f11f. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1002.486390] env[62383]: DEBUG nova.network.neutron [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updating instance_info_cache with network_info: [{"id": "d094226a-fcbf-4faf-890b-89164713f11f", "address": "fa:16:3e:fb:41:07", "network": {"id": "8afc9f4a-8a5e-4f56-99c4-380df4921c2d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-517038880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74493a7d4f564707b9d1d9165d953244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd094226a-fc", "ovs_interfaceid": "d094226a-fcbf-4faf-890b-89164713f11f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.514743] env[62383]: DEBUG oslo_concurrency.lockutils [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.514954] env[62383]: DEBUG oslo_concurrency.lockutils [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.515134] env[62383]: DEBUG nova.network.neutron [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.530124] env[62383]: DEBUG oslo_concurrency.lockutils [None req-de7d2cb7-ac9c-488f-b875-3a8a0fec9729 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.298s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.572488] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} 
[ 1002.572820] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2fc61ca-630d-449f-9d8d-d49d886b6eeb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.580750] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1002.580750] env[62383]: value = "task-2452149" [ 1002.580750] env[62383]: _type = "Task" [ 1002.580750] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.589434] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452149, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.671779] env[62383]: DEBUG nova.scheduler.client.report [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.696779] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452147, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062333} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.696779] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1002.697614] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fa06a6-97d5-4aed-9e06-83f2c74656bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.720485] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 4d929f43-cea2-41a0-9822-180a2647be2c/4d929f43-cea2-41a0-9822-180a2647be2c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.720791] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aaaf7311-5c99-4dfc-ab64-15f3e0c5f087 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.740506] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1002.740506] env[62383]: value = "task-2452150" [ 1002.740506] env[62383]: _type = "Task" [ 1002.740506] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.748281] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452150, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.843117] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5235aa6b-27b0-f83c-92b4-162afad2edf9, 'name': SearchDatastore_Task, 'duration_secs': 0.026258} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.843423] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.843653] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1002.843880] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.844043] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.844246] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.844514] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38e7af49-b3ab-4c2f-ac2c-737e722c6c40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.852452] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.852641] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1002.853378] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c775d8c-d955-4212-a824-296377df1146 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.860798] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1002.860798] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525f1158-2fc9-e6e9-2327-1c25b6d86124" [ 1002.860798] env[62383]: _type = "Task" [ 1002.860798] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.863525] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] Acquiring lock "bab6bfc3-38f9-4f46-b383-35056f161292" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.863740] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] Lock "bab6bfc3-38f9-4f46-b383-35056f161292" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.872646] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525f1158-2fc9-e6e9-2327-1c25b6d86124, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.949683] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452148, 'name': RemoveSnapshot_Task, 'duration_secs': 0.389448} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.949916] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1002.950236] env[62383]: DEBUG nova.compute.manager [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1002.951017] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cc05f7-4d52-4266-b426-417c683090e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.992595] env[62383]: DEBUG oslo_concurrency.lockutils [req-1674f79f-3703-4da3-9c2c-05fa58f2a126 req-14cae2a3-f5b8-42b9-875b-07d4e5cf0a31 service nova] Releasing lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.090417] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452149, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.178028] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.912s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.181044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.238s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.184859] env[62383]: INFO nova.compute.claims [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1003.207176] env[62383]: INFO nova.scheduler.client.report [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Deleted allocations for instance 4e5bd3ee-605f-4770-b658-9cbc3d0010ab [ 1003.213655] env[62383]: WARNING nova.network.neutron [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] ce80ab32-a193-40db-be36-b8085e20a4c5 already exists in list: networks 
containing: ['ce80ab32-a193-40db-be36-b8085e20a4c5']. ignoring it [ 1003.216122] env[62383]: DEBUG nova.network.neutron [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updated VIF entry in instance network info cache for port 275a086a-5096-4414-8397-af9ac5331f87. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1003.216436] env[62383]: DEBUG nova.network.neutron [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating instance_info_cache with network_info: [{"id": "275a086a-5096-4414-8397-af9ac5331f87", "address": "fa:16:3e:2b:a3:17", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap275a086a-50", "ovs_interfaceid": "275a086a-5096-4414-8397-af9ac5331f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.251256] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452150, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.369632] env[62383]: DEBUG nova.compute.manager [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] [instance: bab6bfc3-38f9-4f46-b383-35056f161292] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1003.376885] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525f1158-2fc9-e6e9-2327-1c25b6d86124, 'name': SearchDatastore_Task, 'duration_secs': 0.029887} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.377662] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b9232f2-472a-43cc-acd4-06628d9d1710 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.385180] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1003.385180] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]521f7933-5a29-97e6-7095-465a1ce4e7ed" [ 1003.385180] env[62383]: _type = "Task" [ 1003.385180] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.393402] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521f7933-5a29-97e6-7095-465a1ce4e7ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.463311] env[62383]: INFO nova.compute.manager [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Shelve offloading [ 1003.511031] env[62383]: DEBUG nova.network.neutron [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updating instance_info_cache with network_info: [{"id": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "address": "fa:16:3e:ec:51:f4", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape527ebe3-bc", "ovs_interfaceid": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fe300b59-d7d9-40d6-b3a4-feabee4c56a3", "address": "fa:16:3e:90:f0:83", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe300b59-d7", "ovs_interfaceid": "fe300b59-d7d9-40d6-b3a4-feabee4c56a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.591338] env[62383]: DEBUG oslo_vmware.api [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452149, 'name': PowerOnVM_Task, 'duration_secs': 0.965928} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.591622] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.591813] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-22d91129-5869-4815-ad10-5d45c3e8d6d8 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance 'eedc7859-3882-4837-9419-f9edce5f12fa' progress to 100 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1003.597420] env[62383]: DEBUG nova.compute.manager [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Received event network-vif-plugged-fe300b59-d7d9-40d6-b3a4-feabee4c56a3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1003.597681] env[62383]: DEBUG oslo_concurrency.lockutils [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] Acquiring lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.597828] env[62383]: DEBUG oslo_concurrency.lockutils [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] Lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.597993] env[62383]: DEBUG oslo_concurrency.lockutils [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] Lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.598174] env[62383]: DEBUG nova.compute.manager [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] No waiting events found dispatching network-vif-plugged-fe300b59-d7d9-40d6-b3a4-feabee4c56a3 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1003.598337] env[62383]: WARNING nova.compute.manager [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Received unexpected event network-vif-plugged-fe300b59-d7d9-40d6-b3a4-feabee4c56a3 for instance with vm_state active and task_state None. [ 1003.598492] env[62383]: DEBUG nova.compute.manager [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Received event network-changed-fe300b59-d7d9-40d6-b3a4-feabee4c56a3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1003.598645] env[62383]: DEBUG nova.compute.manager [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Refreshing instance network info cache due to event network-changed-fe300b59-d7d9-40d6-b3a4-feabee4c56a3. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1003.598805] env[62383]: DEBUG oslo_concurrency.lockutils [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] Acquiring lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.720166] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] Releasing lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.720422] env[62383]: DEBUG nova.compute.manager [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Received event network-changed-d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1003.720592] env[62383]: DEBUG nova.compute.manager [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Refreshing instance network info cache due to event network-changed-d094226a-fcbf-4faf-890b-89164713f11f. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1003.720793] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] Acquiring lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.720935] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] Acquired lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.721136] env[62383]: DEBUG nova.network.neutron [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Refreshing network info cache for port d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1003.722708] env[62383]: DEBUG oslo_concurrency.lockutils [None req-319c6fbe-d258-471e-8c08-0f52f5717708 tempest-ServerShowV254Test-701410761 tempest-ServerShowV254Test-701410761-project-member] Lock "4e5bd3ee-605f-4770-b658-9cbc3d0010ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.649s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.754702] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452150, 'name': ReconfigVM_Task, 'duration_secs': 0.864576} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.754702] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 4d929f43-cea2-41a0-9822-180a2647be2c/4d929f43-cea2-41a0-9822-180a2647be2c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.755381] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-947a8252-c9a2-499c-b586-b59c7b5de09f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.761932] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1003.761932] env[62383]: value = "task-2452151" [ 1003.761932] env[62383]: _type = "Task" [ 1003.761932] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.770274] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452151, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.892017] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.895704] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521f7933-5a29-97e6-7095-465a1ce4e7ed, 'name': SearchDatastore_Task, 'duration_secs': 0.015691} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.895937] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.896305] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ec7c648d-10b0-480a-a5f0-4dab08d0049e/ec7c648d-10b0-480a-a5f0-4dab08d0049e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1003.896657] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40739ca6-89b8-496b-a516-1a2fffb9949b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.903702] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1003.903702] env[62383]: value = "task-2452152" [ 1003.903702] env[62383]: _type = "Task" [ 1003.903702] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.911456] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452152, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.970340] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1003.970683] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44cd1885-e2f9-4c40-8612-02533ee82e8a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.977575] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1003.977575] env[62383]: value = "task-2452153" [ 1003.977575] env[62383]: _type = "Task" [ 1003.977575] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.988157] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1003.988393] env[62383]: DEBUG nova.compute.manager [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.989215] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1e648b-a1a3-4d43-a158-6a6c4c4a178e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.995474] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.995661] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.995833] env[62383]: DEBUG nova.network.neutron [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1004.014026] env[62383]: DEBUG oslo_concurrency.lockutils [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock 
"refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.014695] env[62383]: DEBUG oslo_concurrency.lockutils [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.014865] env[62383]: DEBUG oslo_concurrency.lockutils [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.015224] env[62383]: DEBUG oslo_concurrency.lockutils [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] Acquired lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.015422] env[62383]: DEBUG nova.network.neutron [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Refreshing network info cache for port fe300b59-d7d9-40d6-b3a4-feabee4c56a3 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1004.017131] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8f8584-fccf-4df5-afb5-554d8e955fd0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.034624] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1004.034796] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.034956] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1004.035162] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor pref 
0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.035310] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1004.035456] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1004.035798] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1004.035798] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1004.035970] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1004.036196] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1004.036391] env[62383]: DEBUG nova.virt.hardware [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1004.043061] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Reconfiguring VM to attach interface {{(pid=62383) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1004.045278] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acc33cc3-b714-49d1-93ac-7d641dae5940 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.066658] env[62383]: DEBUG oslo_vmware.api [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1004.066658] env[62383]: value = "task-2452154" [ 1004.066658] env[62383]: _type = "Task" [ 1004.066658] env[62383]: } to 
complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.075923] env[62383]: DEBUG oslo_vmware.api [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452154, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.275295] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452151, 'name': Rename_Task, 'duration_secs': 0.243282} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.278431] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1004.281213] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83fc08cc-1ec1-47f9-bb20-1dacac39a636 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.289973] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1004.289973] env[62383]: value = "task-2452155" [ 1004.289973] env[62383]: _type = "Task" [ 1004.289973] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.301384] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452155, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.417132] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452152, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.517517] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dde590c-1182-43cc-bff6-4cee58e071b0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.528835] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e466c8-25e2-4e97-828a-9d84e8ab0237 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.533256] env[62383]: DEBUG nova.network.neutron [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updated VIF entry in instance network info cache for port d094226a-fcbf-4faf-890b-89164713f11f. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1004.533664] env[62383]: DEBUG nova.network.neutron [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updating instance_info_cache with network_info: [{"id": "d094226a-fcbf-4faf-890b-89164713f11f", "address": "fa:16:3e:fb:41:07", "network": {"id": "8afc9f4a-8a5e-4f56-99c4-380df4921c2d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-517038880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74493a7d4f564707b9d1d9165d953244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd094226a-fc", "ovs_interfaceid": "d094226a-fcbf-4faf-890b-89164713f11f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.563475] env[62383]: DEBUG oslo_concurrency.lockutils [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquiring lock "688b0afd-a6e1-4c3f-999d-5975371e888e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.563475] env[62383]: DEBUG oslo_concurrency.lockutils [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lock "688b0afd-a6e1-4c3f-999d-5975371e888e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.563760] env[62383]: DEBUG oslo_concurrency.lockutils [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquiring lock "688b0afd-a6e1-4c3f-999d-5975371e888e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.563989] env[62383]: DEBUG oslo_concurrency.lockutils [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lock "688b0afd-a6e1-4c3f-999d-5975371e888e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1004.564221] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lock "688b0afd-a6e1-4c3f-999d-5975371e888e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.571044] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aafa0fc-dad9-41c0-970d-05007770a0ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.574090] env[62383]: INFO nova.compute.manager [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Terminating instance [ 1004.588705] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7510c03a-61a2-4e8c-9c11-a08ac63d3126 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.593177] env[62383]: DEBUG oslo_vmware.api [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452154, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.604234] env[62383]: DEBUG nova.compute.provider_tree [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.804861] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452155, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.805845] env[62383]: DEBUG nova.network.neutron [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updated VIF entry in instance network info cache for port fe300b59-d7d9-40d6-b3a4-feabee4c56a3. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1004.806733] env[62383]: DEBUG nova.network.neutron [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updating instance_info_cache with network_info: [{"id": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "address": "fa:16:3e:ec:51:f4", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape527ebe3-bc", "ovs_interfaceid": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fe300b59-d7d9-40d6-b3a4-feabee4c56a3", "address": "fa:16:3e:90:f0:83", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe300b59-d7", "ovs_interfaceid": "fe300b59-d7d9-40d6-b3a4-feabee4c56a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.809404] env[62383]: DEBUG nova.network.neutron [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updating instance_info_cache with network_info: [{"id": "0afca8d2-b019-4a25-af28-7061dbf32e28", "address": "fa:16:3e:e8:5a:70", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0afca8d2-b0", "ovs_interfaceid": "0afca8d2-b019-4a25-af28-7061dbf32e28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.915299] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452152, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742208} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.915561] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ec7c648d-10b0-480a-a5f0-4dab08d0049e/ec7c648d-10b0-480a-a5f0-4dab08d0049e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1004.915776] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1004.916035] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab97c159-f733-4509-a66b-6aa93afa8099 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.923324] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1004.923324] env[62383]: value = "task-2452156" [ 1004.923324] env[62383]: _type = "Task" [ 1004.923324] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.931355] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452156, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.011114] env[62383]: DEBUG nova.compute.manager [req-803b55ba-4c9e-4c26-9f09-33225dfef44e req-d26357b1-bef9-4fee-92e4-299d599fa8a7 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Received event network-changed-d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1005.011114] env[62383]: DEBUG nova.compute.manager [req-803b55ba-4c9e-4c26-9f09-33225dfef44e req-d26357b1-bef9-4fee-92e4-299d599fa8a7 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Refreshing instance network info cache due to event network-changed-d094226a-fcbf-4faf-890b-89164713f11f. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1005.011233] env[62383]: DEBUG oslo_concurrency.lockutils [req-803b55ba-4c9e-4c26-9f09-33225dfef44e req-d26357b1-bef9-4fee-92e4-299d599fa8a7 service nova] Acquiring lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.037202] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc276b8e-899d-48c4-9c4c-abe3ba533cc1 req-2353727c-7a13-4317-8de7-5fbad13336a0 service nova] Releasing lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.037650] env[62383]: DEBUG oslo_concurrency.lockutils [req-803b55ba-4c9e-4c26-9f09-33225dfef44e req-d26357b1-bef9-4fee-92e4-299d599fa8a7 service nova] Acquired lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.037852] env[62383]: DEBUG nova.network.neutron [req-803b55ba-4c9e-4c26-9f09-33225dfef44e req-d26357b1-bef9-4fee-92e4-299d599fa8a7 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Refreshing network info cache for port d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1005.083917] env[62383]: DEBUG nova.compute.manager [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1005.084195] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1005.084475] env[62383]: DEBUG oslo_vmware.api [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452154, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.085203] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260f59db-7057-46f6-842e-8370064ff190 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.091617] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1005.091839] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6b97512-b650-4c35-8ee2-145a943ab844 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.097299] env[62383]: DEBUG oslo_vmware.api [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 1005.097299] env[62383]: value = "task-2452157" [ 1005.097299] env[62383]: _type = "Task" [ 1005.097299] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.104771] env[62383]: DEBUG oslo_vmware.api [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452157, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.108096] env[62383]: DEBUG nova.scheduler.client.report [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1005.301040] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452155, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.310946] env[62383]: DEBUG oslo_concurrency.lockutils [req-08f9ed2d-b092-4529-9fa8-c187a53b15ff req-67954026-035f-4256-9081-e936e59ba12a service nova] Releasing lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.311722] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.435577] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452156, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071738} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.435848] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.436695] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760deca8-dd6c-40ee-b52d-a1f4a502793a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.465492] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] ec7c648d-10b0-480a-a5f0-4dab08d0049e/ec7c648d-10b0-480a-a5f0-4dab08d0049e.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.465835] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d97a4a79-691c-45d1-a3c9-8dba7be74ef1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.487371] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1005.487371] env[62383]: value = "task-2452158" [ 1005.487371] env[62383]: _type = "Task" [ 1005.487371] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.498484] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452158, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.589808] env[62383]: DEBUG oslo_vmware.api [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452154, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.606156] env[62383]: DEBUG oslo_vmware.api [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452157, 'name': PowerOffVM_Task, 'duration_secs': 0.343653} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.606557] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.606853] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.607351] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6321b519-2c10-4403-8b65-677ac52f7498 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.612057] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.615018] env[62383]: DEBUG nova.compute.manager [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1005.620155] env[62383]: DEBUG oslo_concurrency.lockutils [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.722s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.620155] env[62383]: DEBUG nova.objects.instance [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lazy-loading 'resources' on Instance uuid 152567ba-f24c-4674-b06e-98c76a3da324 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.678131] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.678873] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.679741] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Deleting the datastore file [datastore1] 688b0afd-a6e1-4c3f-999d-5975371e888e {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.679741] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91a1d66b-cf90-492c-a482-2264677ab669 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.688391] env[62383]: DEBUG oslo_vmware.api [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for the task: (returnval){ [ 1005.688391] env[62383]: value = "task-2452160" [ 1005.688391] env[62383]: _type = "Task" [ 1005.688391] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.700204] env[62383]: DEBUG oslo_vmware.api [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452160, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.705319] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1005.706588] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f23e99-b92f-4c9c-b3bd-b34e7e1f753e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.714554] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.716123] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a204a83-8ac7-4d54-8d08-6826f8f15901 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.779641] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.779863] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.780056] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleting the datastore file [datastore2] c56464dd-63af-4686-b666-d0ac2df01ec1 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.780332] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-923e991b-d923-47fe-8f51-8d1eb52c9571 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.788436] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1005.788436] env[62383]: value = "task-2452162" [ 1005.788436] env[62383]: _type = "Task" [ 1005.788436] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.800255] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452162, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.803122] env[62383]: DEBUG oslo_vmware.api [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452155, 'name': PowerOnVM_Task, 'duration_secs': 1.065198} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.803363] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1005.803645] env[62383]: INFO nova.compute.manager [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Took 9.94 seconds to spawn the instance on the hypervisor. [ 1005.803731] env[62383]: DEBUG nova.compute.manager [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1005.804461] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cd3de0-fd33-4a71-954b-765e83e000b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.806859] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.807091] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.807259] env[62383]: DEBUG nova.compute.manager [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1005.808017] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f547d28-96f9-4b24-94a1-96e55654d06c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.813836] env[62383]: DEBUG nova.compute.manager [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Stopping instance; current vm_state: active, current task_state: powering-off, current DB 
power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1005.814394] env[62383]: DEBUG nova.objects.instance [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lazy-loading 'flavor' on Instance uuid 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.822070] env[62383]: DEBUG nova.network.neutron [req-803b55ba-4c9e-4c26-9f09-33225dfef44e req-d26357b1-bef9-4fee-92e4-299d599fa8a7 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updated VIF entry in instance network info cache for port d094226a-fcbf-4faf-890b-89164713f11f. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1005.822410] env[62383]: DEBUG nova.network.neutron [req-803b55ba-4c9e-4c26-9f09-33225dfef44e req-d26357b1-bef9-4fee-92e4-299d599fa8a7 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updating instance_info_cache with network_info: [{"id": "d094226a-fcbf-4faf-890b-89164713f11f", "address": "fa:16:3e:fb:41:07", "network": {"id": "8afc9f4a-8a5e-4f56-99c4-380df4921c2d", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-517038880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "74493a7d4f564707b9d1d9165d953244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd094226a-fc", "ovs_interfaceid": "d094226a-fcbf-4faf-890b-89164713f11f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.996525] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452158, 'name': ReconfigVM_Task, 'duration_secs': 0.313318} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.996904] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Reconfigured VM instance instance-00000062 to attach disk [datastore2] ec7c648d-10b0-480a-a5f0-4dab08d0049e/ec7c648d-10b0-480a-a5f0-4dab08d0049e.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1005.997490] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d9ab54d4-3ca5-492d-862e-76182ae0f697 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.003202] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1006.003202] env[62383]: value = "task-2452163" [ 1006.003202] env[62383]: _type = "Task" [ 1006.003202] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.011924] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452163, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.058242] env[62383]: DEBUG nova.compute.manager [req-486dfd1b-dff9-4092-b322-ed32752b6c74 req-74e74084-35ec-4a43-be76-75a28371f61d service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Received event network-vif-unplugged-0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1006.058242] env[62383]: DEBUG oslo_concurrency.lockutils [req-486dfd1b-dff9-4092-b322-ed32752b6c74 req-74e74084-35ec-4a43-be76-75a28371f61d service nova] Acquiring lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.058325] env[62383]: DEBUG oslo_concurrency.lockutils [req-486dfd1b-dff9-4092-b322-ed32752b6c74 req-74e74084-35ec-4a43-be76-75a28371f61d service nova] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.058473] env[62383]: DEBUG oslo_concurrency.lockutils [req-486dfd1b-dff9-4092-b322-ed32752b6c74 req-74e74084-35ec-4a43-be76-75a28371f61d service nova] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.058638] env[62383]: DEBUG nova.compute.manager [req-486dfd1b-dff9-4092-b322-ed32752b6c74 req-74e74084-35ec-4a43-be76-75a28371f61d service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] No waiting events found dispatching 
network-vif-unplugged-0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1006.058808] env[62383]: WARNING nova.compute.manager [req-486dfd1b-dff9-4092-b322-ed32752b6c74 req-74e74084-35ec-4a43-be76-75a28371f61d service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Received unexpected event network-vif-unplugged-0afca8d2-b019-4a25-af28-7061dbf32e28 for instance with vm_state shelved and task_state shelving_offloading. [ 1006.088437] env[62383]: DEBUG oslo_vmware.api [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452154, 'name': ReconfigVM_Task, 'duration_secs': 1.942564} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.088881] env[62383]: DEBUG oslo_concurrency.lockutils [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.089166] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Reconfigured VM to attach interface {{(pid=62383) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1006.126200] env[62383]: DEBUG nova.compute.utils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1006.130731] env[62383]: DEBUG nova.compute.manager [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1006.130941] env[62383]: DEBUG nova.network.neutron [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1006.195024] env[62383]: DEBUG nova.policy [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1626e18075dd45adbceb5117005b4af2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6ee1720bfaf4580a71e90b6cdae724a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1006.204085] env[62383]: DEBUG oslo_vmware.api [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Task: {'id': task-2452160, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.253063} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.206706] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1006.206905] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1006.207130] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1006.207319] env[62383]: INFO nova.compute.manager [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1006.207561] env[62383]: DEBUG oslo.service.loopingcall [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1006.207985] env[62383]: DEBUG nova.compute.manager [-] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1006.208106] env[62383]: DEBUG nova.network.neutron [-] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1006.302469] env[62383]: DEBUG oslo_vmware.api [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452162, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188458} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.302738] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1006.303607] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1006.303607] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1006.327845] env[62383]: INFO nova.scheduler.client.report [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleted allocations for instance c56464dd-63af-4686-b666-d0ac2df01ec1 [ 1006.331626] env[62383]: DEBUG oslo_concurrency.lockutils [req-803b55ba-4c9e-4c26-9f09-33225dfef44e req-d26357b1-bef9-4fee-92e4-299d599fa8a7 service nova] Releasing lock "refresh_cache-688b0afd-a6e1-4c3f-999d-5975371e888e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.335126] env[62383]: INFO nova.compute.manager [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Took 31.91 seconds to build instance. 
[ 1006.419028] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8268da73-448c-4296-8124-6f40f4f89821 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.427772] env[62383]: DEBUG nova.network.neutron [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Port 6ba23e44-2c77-442d-9aee-5a75d8abab68 binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1006.428531] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.429529] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.429529] env[62383]: DEBUG nova.network.neutron [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.431511] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194020de-ac14-4754-9f72-ce835a8066fb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.466392] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd07810-f8c6-4341-8c58-da93aea805bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.477547] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00eef383-6296-497c-83c7-0f17343285e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.495981] env[62383]: DEBUG nova.compute.provider_tree [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.516036] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452163, 'name': Rename_Task, 'duration_secs': 0.14941} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.516330] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.516610] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b72e7ac8-5267-4903-bb3c-a0b50ee7a4e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.523644] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1006.523644] env[62383]: value = "task-2452164" [ 1006.523644] env[62383]: _type = "Task" [ 1006.523644] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.533311] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452164, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.595767] env[62383]: DEBUG oslo_concurrency.lockutils [None req-daaecce4-7bf3-4c29-a7ef-966506b40be5 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-4cd9c7be-c5f4-460b-a9e2-e8f778076947-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.689s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.631152] env[62383]: DEBUG nova.compute.manager [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1006.649836] env[62383]: DEBUG nova.network.neutron [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Successfully created port: 7543f1ad-e963-47ac-a972-6320079ef920 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1006.835313] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.835313] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.835530] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6d52b84-5373-4662-84a3-c7b6c01da8b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.839027] env[62383]: DEBUG oslo_concurrency.lockutils [None req-06eb493d-0f35-421e-889f-bdcaf0ebbdf9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4d929f43-cea2-41a0-9822-180a2647be2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.423s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.843931] env[62383]: DEBUG oslo_vmware.api [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 1006.843931] env[62383]: value = "task-2452165" [ 1006.843931] env[62383]: _type = "Task" [ 1006.843931] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.854716] env[62383]: DEBUG oslo_vmware.api [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452165, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.001398] env[62383]: DEBUG nova.scheduler.client.report [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1007.035992] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452164, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.126851] env[62383]: DEBUG nova.network.neutron [-] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.162046] env[62383]: DEBUG nova.network.neutron [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance_info_cache with network_info: [{"id": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "address": "fa:16:3e:7f:55:d3", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba23e44-2c", "ovs_interfaceid": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.356065] env[62383]: DEBUG oslo_vmware.api [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452165, 'name': PowerOffVM_Task, 'duration_secs': 0.443844} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.356065] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.356065] env[62383]: DEBUG nova.compute.manager [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1007.356697] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f098e3-b45e-49bc-9b4d-c98b27d9fbe0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.506695] env[62383]: DEBUG oslo_concurrency.lockutils [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.510639] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.345s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.510886] env[62383]: DEBUG nova.objects.instance [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lazy-loading 'resources' on Instance uuid b451f9ad-cda6-49a3-801e-acbf121e9552 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.536401] env[62383]: DEBUG oslo_vmware.api [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452164, 'name': PowerOnVM_Task, 'duration_secs': 0.542869} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.536725] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1007.536926] env[62383]: INFO nova.compute.manager [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Took 9.16 seconds to spawn the instance on the hypervisor. 
[ 1007.537146] env[62383]: DEBUG nova.compute.manager [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1007.537928] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c582a352-e607-40b0-9e52-88209f08ee61 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.542417] env[62383]: INFO nova.scheduler.client.report [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Deleted allocations for instance 152567ba-f24c-4674-b06e-98c76a3da324 [ 1007.629747] env[62383]: INFO nova.compute.manager [-] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Took 1.42 seconds to deallocate network for instance. [ 1007.640754] env[62383]: DEBUG nova.compute.manager [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1007.664539] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1007.670469] env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1007.670693] env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1007.670850] env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1007.671043] 
env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1007.671200] env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1007.671375] env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1007.671593] env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1007.671751] env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1007.671915] env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1007.672096] env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1007.672277] env[62383]: DEBUG nova.virt.hardware [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1007.673339] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abbfa48-7d4f-4f09-8f3c-d3c0e5f94797 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.682507] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1260c7-8d33-4f58-ab9d-1be04613f922 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.701168] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "eb632e2d-b71e-446d-83a2-0bab1d823d27" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.701347] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "eb632e2d-b71e-446d-83a2-0bab1d823d27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.701544] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "eb632e2d-b71e-446d-83a2-0bab1d823d27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.701719] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "eb632e2d-b71e-446d-83a2-0bab1d823d27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.701881] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "eb632e2d-b71e-446d-83a2-0bab1d823d27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.703856] env[62383]: INFO nova.compute.manager [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Terminating instance [ 1007.871724] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dc464a86-fde2-40f3-a27b-184cf1367d1c tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.064s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.042757] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "4d929f43-cea2-41a0-9822-180a2647be2c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.042964] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4d929f43-cea2-41a0-9822-180a2647be2c" acquired by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.062917] env[62383]: INFO nova.compute.manager [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Took 32.08 seconds to build instance. [ 1008.065074] env[62383]: DEBUG oslo_concurrency.lockutils [None req-74037aeb-75a5-4f15-bc1b-4c586c5d5f95 tempest-ServerRescueNegativeTestJSON-588819548 tempest-ServerRescueNegativeTestJSON-588819548-project-member] Lock "152567ba-f24c-4674-b06e-98c76a3da324" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.565s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.121157] env[62383]: DEBUG nova.compute.manager [req-08395d58-beac-4b78-b691-6c9f57add2c9 req-90b7a7a7-c10b-42ef-b0c9-d98636cadb10 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Received event network-changed-0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1008.121368] env[62383]: DEBUG nova.compute.manager [req-08395d58-beac-4b78-b691-6c9f57add2c9 req-90b7a7a7-c10b-42ef-b0c9-d98636cadb10 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Refreshing instance network info cache due to event network-changed-0afca8d2-b019-4a25-af28-7061dbf32e28. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1008.121580] env[62383]: DEBUG oslo_concurrency.lockutils [req-08395d58-beac-4b78-b691-6c9f57add2c9 req-90b7a7a7-c10b-42ef-b0c9-d98636cadb10 service nova] Acquiring lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.121722] env[62383]: DEBUG oslo_concurrency.lockutils [req-08395d58-beac-4b78-b691-6c9f57add2c9 req-90b7a7a7-c10b-42ef-b0c9-d98636cadb10 service nova] Acquired lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.121902] env[62383]: DEBUG nova.network.neutron [req-08395d58-beac-4b78-b691-6c9f57add2c9 req-90b7a7a7-c10b-42ef-b0c9-d98636cadb10 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Refreshing network info cache for port 0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1008.135643] env[62383]: DEBUG oslo_concurrency.lockutils [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.168679] env[62383]: DEBUG nova.compute.manager [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62383) _is_instance_storage_shared 
/opt/stack/nova/nova/compute/manager.py:901}} [ 1008.210025] env[62383]: DEBUG nova.compute.manager [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1008.210294] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.211362] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51870a71-d159-4dbd-b322-bb7e44638d9d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.219832] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.220069] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6c8bb58-87f9-4f0d-9148-e8ee9dd143bc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.229366] env[62383]: DEBUG oslo_vmware.api [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1008.229366] env[62383]: value = "task-2452166" [ 1008.229366] env[62383]: _type = "Task" [ 1008.229366] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.241225] env[62383]: DEBUG oslo_vmware.api [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452166, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.292469] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc23498-0643-47e5-88ba-64395f38a5aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.300985] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37178c77-7371-4e06-9d93-2d6f85055380 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.334335] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0a4790-a493-4316-8bfc-6b9055877d8a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.342860] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a14d87c-2d2f-46d4-9fda-36c961377a51 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.357785] env[62383]: DEBUG nova.compute.provider_tree [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.383590] env[62383]: DEBUG nova.network.neutron [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Successfully updated port: 7543f1ad-e963-47ac-a972-6320079ef920 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.551584] env[62383]: DEBUG nova.compute.utils [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1008.570157] env[62383]: DEBUG oslo_concurrency.lockutils [None req-67740352-63a2-4aa0-981d-554cb0a7d705 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.596s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.599707] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "interface-4cd9c7be-c5f4-460b-a9e2-e8f778076947-fe300b59-d7d9-40d6-b3a4-feabee4c56a3" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.599946] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-4cd9c7be-c5f4-460b-a9e2-e8f778076947-fe300b59-d7d9-40d6-b3a4-feabee4c56a3" acquired by 
"nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.739771] env[62383]: DEBUG oslo_vmware.api [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452166, 'name': PowerOffVM_Task, 'duration_secs': 0.203657} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.740271] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.740271] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.740432] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42535322-5c22-42fe-b6eb-9b56811e2156 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.838504] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.838724] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.838912] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleting the datastore file [datastore2] eb632e2d-b71e-446d-83a2-0bab1d823d27 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.839221] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c281c24-d9dd-47ec-89e8-9c0026375845 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.846096] env[62383]: DEBUG oslo_vmware.api [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1008.846096] env[62383]: value = "task-2452168" [ 1008.846096] env[62383]: _type = "Task" [ 1008.846096] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.854248] env[62383]: DEBUG oslo_vmware.api [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452168, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.861119] env[62383]: DEBUG nova.scheduler.client.report [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1008.888997] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquiring lock "refresh_cache-8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1008.889417] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquired lock "refresh_cache-8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.889607] env[62383]: DEBUG nova.network.neutron [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1008.946614] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "c56464dd-63af-4686-b666-d0ac2df01ec1" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.952530] env[62383]: DEBUG nova.network.neutron [req-08395d58-beac-4b78-b691-6c9f57add2c9 req-90b7a7a7-c10b-42ef-b0c9-d98636cadb10 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updated VIF entry in instance network info cache for port 0afca8d2-b019-4a25-af28-7061dbf32e28. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1008.952874] env[62383]: DEBUG nova.network.neutron [req-08395d58-beac-4b78-b691-6c9f57add2c9 req-90b7a7a7-c10b-42ef-b0c9-d98636cadb10 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updating instance_info_cache with network_info: [{"id": "0afca8d2-b019-4a25-af28-7061dbf32e28", "address": "fa:16:3e:e8:5a:70", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap0afca8d2-b0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.054726] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4d929f43-cea2-41a0-9822-180a2647be2c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.103096] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.103379] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.104285] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c810cd9-e86c-4357-97f0-4e32941be5de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.132548] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6f364b-81ff-4fb5-bbac-87d66faff3c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.158989] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Reconfiguring VM to detach interface {{(pid=62383) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1009.159423] env[62383]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4384428-28af-40f5-a640-daff7f4ef7de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.181723] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1009.181723] env[62383]: value = "task-2452169" [ 1009.181723] env[62383]: _type = "Task" [ 1009.181723] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.190705] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.304610] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.362072] env[62383]: DEBUG oslo_vmware.api [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452168, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.370143] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.861s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.374089] env[62383]: DEBUG nova.compute.manager [req-fa8a56ed-ff86-4943-b60a-5f45ab16872a req-28ed7a4c-b93b-4b6b-abdb-b5b213e466cd service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received event network-changed-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1009.374211] env[62383]: DEBUG nova.compute.manager [req-fa8a56ed-ff86-4943-b60a-5f45ab16872a req-28ed7a4c-b93b-4b6b-abdb-b5b213e466cd service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Refreshing instance network info cache due to event network-changed-275a086a-5096-4414-8397-af9ac5331f87. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1009.374595] env[62383]: DEBUG oslo_concurrency.lockutils [req-fa8a56ed-ff86-4943-b60a-5f45ab16872a req-28ed7a4c-b93b-4b6b-abdb-b5b213e466cd service nova] Acquiring lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.374764] env[62383]: DEBUG oslo_concurrency.lockutils [req-fa8a56ed-ff86-4943-b60a-5f45ab16872a req-28ed7a4c-b93b-4b6b-abdb-b5b213e466cd service nova] Acquired lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.374989] env[62383]: DEBUG nova.network.neutron [req-fa8a56ed-ff86-4943-b60a-5f45ab16872a req-28ed7a4c-b93b-4b6b-abdb-b5b213e466cd service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Refreshing network info cache for port 275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1009.376507] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.485s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.378889] env[62383]: INFO nova.compute.claims [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] [instance: bab6bfc3-38f9-4f46-b383-35056f161292] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1009.410106] env[62383]: INFO nova.scheduler.client.report [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Deleted allocations for instance b451f9ad-cda6-49a3-801e-acbf121e9552 [ 1009.449054] env[62383]: DEBUG nova.network.neutron [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1009.456027] env[62383]: DEBUG oslo_concurrency.lockutils [req-08395d58-beac-4b78-b691-6c9f57add2c9 req-90b7a7a7-c10b-42ef-b0c9-d98636cadb10 service nova] Releasing lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.457029] env[62383]: DEBUG nova.compute.manager [req-08395d58-beac-4b78-b691-6c9f57add2c9 req-90b7a7a7-c10b-42ef-b0c9-d98636cadb10 service nova] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Received event network-vif-deleted-d094226a-fcbf-4faf-890b-89164713f11f {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1009.695347] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.736409] env[62383]: DEBUG nova.network.neutron [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Updating instance_info_cache with network_info: [{"id": "7543f1ad-e963-47ac-a972-6320079ef920", "address": "fa:16:3e:e8:01:3a", "network": {"id": "8d453d2c-28d6-46f7-bc0a-c6d572b9857b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-330172488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6ee1720bfaf4580a71e90b6cdae724a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7543f1ad-e9", "ovs_interfaceid": "7543f1ad-e963-47ac-a972-6320079ef920", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.858471] env[62383]: DEBUG oslo_vmware.api [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452168, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.524077} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.862122] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.862398] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1009.862589] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1009.862775] env[62383]: INFO nova.compute.manager [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Took 1.65 seconds to destroy the instance on the hypervisor. 
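The "Task: {'id': task-..., 'name': ...} progress is N%" and "completed successfully" records above reflect a poll loop over a vCenter task until it reaches a terminal state. A minimal self-contained sketch of that polling pattern, assuming a hypothetical get_task_info callable (this is an illustration, not the oslo.vmware implementation):

    import time

    def wait_for_task(get_task_info, interval=0.5):
        # get_task_info() is assumed to return an object with .state
        # ('queued', 'running', 'success' or 'error'), .progress and .error,
        # mirroring the fields of a VIM TaskInfo object.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError(info.error)
            # A real implementation would log "... progress is N%." here.
            time.sleep(interval)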
[ 1009.863045] env[62383]: DEBUG oslo.service.loopingcall [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1009.868410] env[62383]: DEBUG nova.compute.manager [-] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1009.868508] env[62383]: DEBUG nova.network.neutron [-] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.885406] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.508s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.886235] env[62383]: DEBUG nova.compute.utils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] [instance: bab6bfc3-38f9-4f46-b383-35056f161292] Instance bab6bfc3-38f9-4f46-b383-35056f161292 could not be found. {{(pid=62383) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1009.887799] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.053s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.888503] env[62383]: DEBUG nova.objects.instance [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lazy-loading 'resources' on Instance uuid c56464dd-63af-4686-b666-d0ac2df01ec1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.892714] env[62383]: DEBUG nova.compute.manager [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] [instance: bab6bfc3-38f9-4f46-b383-35056f161292] Instance disappeared during build. 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2520}} [ 1009.892922] env[62383]: DEBUG nova.compute.manager [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] [instance: bab6bfc3-38f9-4f46-b383-35056f161292] Unplugging VIFs for instance {{(pid=62383) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1009.893201] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] Acquiring lock "refresh_cache-bab6bfc3-38f9-4f46-b383-35056f161292" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.893416] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] Acquired lock "refresh_cache-bab6bfc3-38f9-4f46-b383-35056f161292" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.893599] env[62383]: DEBUG nova.network.neutron [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] [instance: bab6bfc3-38f9-4f46-b383-35056f161292] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1009.921740] env[62383]: DEBUG oslo_concurrency.lockutils [None req-11ce518c-2a0b-42ab-95cc-c81c4809dcc4 tempest-ServersTestMultiNic-266925673 tempest-ServersTestMultiNic-266925673-project-member] Lock "b451f9ad-cda6-49a3-801e-acbf121e9552" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.555s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.169860] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "4d929f43-cea2-41a0-9822-180a2647be2c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.170211] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4d929f43-cea2-41a0-9822-180a2647be2c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.170543] env[62383]: INFO nova.compute.manager [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Attaching volume c744bbf6-ff18-4c9f-8f11-c14e62047e05 to /dev/sdb [ 1010.199115] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.218124] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83018b86-477c-44ae-adca-6c55dbaaf79c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.224980] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def6bc87-1852-4d65-b2e8-27e327018ee4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.238639] env[62383]: DEBUG nova.virt.block_device [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Updating existing volume attachment record: 6e94561c-b96a-40f7-93c2-46592ab23c2b {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1010.241120] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Releasing lock "refresh_cache-8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.241419] env[62383]: DEBUG nova.compute.manager [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Instance network_info: |[{"id": "7543f1ad-e963-47ac-a972-6320079ef920", "address": "fa:16:3e:e8:01:3a", "network": {"id": "8d453d2c-28d6-46f7-bc0a-c6d572b9857b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-330172488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6ee1720bfaf4580a71e90b6cdae724a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7543f1ad-e9", "ovs_interfaceid": "7543f1ad-e963-47ac-a972-6320079ef920", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1010.241796] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:01:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a4d142-3f97-47fe-b074-58923c46815e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7543f1ad-e963-47ac-a972-6320079ef920', 'vif_model': 'vmxnet3'}] 
{{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.249490] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Creating folder: Project (a6ee1720bfaf4580a71e90b6cdae724a). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1010.249675] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8d72059-2d72-43f7-bcb8-90fdc9affc10 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.260682] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Created folder: Project (a6ee1720bfaf4580a71e90b6cdae724a) in parent group-v496304. [ 1010.260920] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Creating folder: Instances. Parent ref: group-v496564. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1010.261138] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7f4e172-796e-48bf-a520-b4f2c0b18576 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.270414] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Created folder: Instances in parent group-v496564. [ 1010.270662] env[62383]: DEBUG oslo.service.loopingcall [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.270859] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1010.271101] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20a2ef8d-141a-4122-83e8-162dc3571b21 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.294060] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.294060] env[62383]: value = "task-2452172" [ 1010.294060] env[62383]: _type = "Task" [ 1010.294060] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.303949] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452172, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.365122] env[62383]: DEBUG nova.compute.manager [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Received event network-vif-plugged-7543f1ad-e963-47ac-a972-6320079ef920 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1010.365122] env[62383]: DEBUG oslo_concurrency.lockutils [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] Acquiring lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.365122] env[62383]: DEBUG oslo_concurrency.lockutils [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] Lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.365122] env[62383]: DEBUG oslo_concurrency.lockutils [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] Lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.365122] env[62383]: DEBUG nova.compute.manager [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] No waiting events found dispatching network-vif-plugged-7543f1ad-e963-47ac-a972-6320079ef920 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1010.365762] env[62383]: WARNING nova.compute.manager [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Received unexpected event network-vif-plugged-7543f1ad-e963-47ac-a972-6320079ef920 for instance with vm_state building and task_state spawning. [ 1010.365762] env[62383]: DEBUG nova.compute.manager [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Received event network-changed-7543f1ad-e963-47ac-a972-6320079ef920 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1010.365762] env[62383]: DEBUG nova.compute.manager [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Refreshing instance network info cache due to event network-changed-7543f1ad-e963-47ac-a972-6320079ef920. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1010.366144] env[62383]: DEBUG oslo_concurrency.lockutils [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] Acquiring lock "refresh_cache-8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.366144] env[62383]: DEBUG oslo_concurrency.lockutils [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] Acquired lock "refresh_cache-8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.366144] env[62383]: DEBUG nova.network.neutron [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Refreshing network info cache for port 7543f1ad-e963-47ac-a972-6320079ef920 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.379022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.379022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.379022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.379022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.379022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.380244] env[62383]: INFO nova.compute.manager [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 
1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Terminating instance [ 1010.392745] env[62383]: DEBUG nova.objects.instance [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lazy-loading 'numa_topology' on Instance uuid c56464dd-63af-4686-b666-d0ac2df01ec1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.400128] env[62383]: DEBUG nova.compute.utils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] [instance: bab6bfc3-38f9-4f46-b383-35056f161292] Can not refresh info_cache because instance was not found {{(pid=62383) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1055}} [ 1010.439085] env[62383]: DEBUG nova.network.neutron [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] [instance: bab6bfc3-38f9-4f46-b383-35056f161292] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1010.535851] env[62383]: DEBUG nova.network.neutron [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] [instance: bab6bfc3-38f9-4f46-b383-35056f161292] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.695736] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.757165] env[62383]: DEBUG nova.network.neutron [req-fa8a56ed-ff86-4943-b60a-5f45ab16872a req-28ed7a4c-b93b-4b6b-abdb-b5b213e466cd service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updated VIF entry in instance network info cache for port 275a086a-5096-4414-8397-af9ac5331f87. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1010.757165] env[62383]: DEBUG nova.network.neutron [req-fa8a56ed-ff86-4943-b60a-5f45ab16872a req-28ed7a4c-b93b-4b6b-abdb-b5b213e466cd service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating instance_info_cache with network_info: [{"id": "275a086a-5096-4414-8397-af9ac5331f87", "address": "fa:16:3e:2b:a3:17", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap275a086a-50", "ovs_interfaceid": "275a086a-5096-4414-8397-af9ac5331f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.807725] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452172, 'name': CreateVM_Task, 'duration_secs': 0.441221} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.808095] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1010.808877] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.809556] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.809990] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1010.811099] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8872cea-d936-4feb-b641-a0e96cd2773c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.817266] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for the task: (returnval){ [ 1010.817266] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52cc014b-b121-4cbf-4c5c-2093b407dc27" [ 1010.817266] env[62383]: _type = "Task" [ 1010.817266] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.826321] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52cc014b-b121-4cbf-4c5c-2093b407dc27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.889150] env[62383]: DEBUG nova.compute.manager [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1010.889150] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1010.889150] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c342facf-23d1-4c5b-8318-048cb569f508 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.896018] env[62383]: DEBUG nova.objects.base [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1010.898287] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.900017] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f29de91b-2f9d-4a9b-9b49-78a772c1cbc0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.971250] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.971250] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.971250] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleting the datastore file [datastore2] 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.973637] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9e50124-c4b2-4f93-b05c-beb0404cb5d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.984860] env[62383]: DEBUG oslo_vmware.api [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 1010.984860] env[62383]: value = "task-2452177" [ 1010.984860] env[62383]: _type = "Task" [ 1010.984860] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.992733] env[62383]: DEBUG oslo_vmware.api [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452177, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.038331] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] Releasing lock "refresh_cache-bab6bfc3-38f9-4f46-b383-35056f161292" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.038562] env[62383]: DEBUG nova.compute.manager [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62383) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1011.038786] env[62383]: DEBUG nova.compute.manager [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] [instance: bab6bfc3-38f9-4f46-b383-35056f161292] Skipping network deallocation for instance since networking was not requested. {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1011.201490] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.207722] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809955bb-12a9-4cf2-a61c-e1ccf9389b8c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.214946] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8718b19e-8f03-45d3-a065-eb1b32077335 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.253665] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cec5e0-ddc1-406b-b3af-92e9db45a19e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.259585] env[62383]: DEBUG oslo_concurrency.lockutils [req-fa8a56ed-ff86-4943-b60a-5f45ab16872a req-28ed7a4c-b93b-4b6b-abdb-b5b213e466cd service nova] Releasing lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.263070] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348be544-5132-4ae3-bd4d-884747240194 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.276646] env[62383]: DEBUG nova.compute.provider_tree [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.328750] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52cc014b-b121-4cbf-4c5c-2093b407dc27, 'name': SearchDatastore_Task, 'duration_secs': 0.018645} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.328956] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.330052] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.330052] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1011.330052] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.330224] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.330456] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68ebded8-65e0-43f2-8097-420bf37c67cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.339527] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.339735] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1011.340626] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d5f4a43-bd70-4752-a9a2-e192fb508d7b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.346647] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for the task: (returnval){ [ 1011.346647] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527d52ac-4c56-e631-800f-74c4847a82de" [ 1011.346647] env[62383]: _type = "Task" [ 1011.346647] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.355405] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527d52ac-4c56-e631-800f-74c4847a82de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.393297] env[62383]: DEBUG nova.network.neutron [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Updated VIF entry in instance network info cache for port 7543f1ad-e963-47ac-a972-6320079ef920. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1011.393775] env[62383]: DEBUG nova.network.neutron [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Updating instance_info_cache with network_info: [{"id": "7543f1ad-e963-47ac-a972-6320079ef920", "address": "fa:16:3e:e8:01:3a", "network": {"id": "8d453d2c-28d6-46f7-bc0a-c6d572b9857b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-330172488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6ee1720bfaf4580a71e90b6cdae724a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7543f1ad-e9", "ovs_interfaceid": "7543f1ad-e963-47ac-a972-6320079ef920", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.493190] env[62383]: DEBUG oslo_vmware.api [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452177, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267829} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.493468] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.493917] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.494137] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.494324] env[62383]: INFO nova.compute.manager [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1011.494765] env[62383]: DEBUG oslo.service.loopingcall [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.495044] env[62383]: DEBUG nova.compute.manager [-] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1011.495129] env[62383]: DEBUG nova.network.neutron [-] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1011.698767] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.781291] env[62383]: DEBUG nova.scheduler.client.report [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1011.787078] env[62383]: DEBUG nova.network.neutron [-] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.862019] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527d52ac-4c56-e631-800f-74c4847a82de, 'name': SearchDatastore_Task, 'duration_secs': 0.012233} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.862019] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b12ec044-f6e6-4870-9683-67441d6e9029 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.866418] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for the task: (returnval){ [ 1011.866418] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5230e8f8-2a2e-5046-6d41-dd40b7df8452" [ 1011.866418] env[62383]: _type = "Task" [ 1011.866418] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.875023] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5230e8f8-2a2e-5046-6d41-dd40b7df8452, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.897433] env[62383]: DEBUG oslo_concurrency.lockutils [req-bae1de58-a2c5-4fc7-9998-bb19289bdd99 req-749f918e-5a4b-449f-96bb-2615798ab791 service nova] Releasing lock "refresh_cache-8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.063455] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c16b8d57-7577-4b45-ab38-0284c55b4173 tempest-ServersListShow296Test-2038302708 tempest-ServersListShow296Test-2038302708-project-member] Lock "bab6bfc3-38f9-4f46-b383-35056f161292" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 9.199s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.199515] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.231413] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "20861554-890b-4ad3-a73f-0c825a79bbf1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.231660] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "20861554-890b-4ad3-a73f-0c825a79bbf1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.289670] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.402s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.292544] env[62383]: INFO nova.compute.manager [-] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Took 2.42 seconds to deallocate network for instance. 
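
The recurring "Acquiring lock ... / Lock ... acquired ... waited Ns / ... "released" ... held Ns" entries above and below are emitted by oslo_concurrency.lockutils. The following is a minimal sketch, not Nova's actual code, of the two usage forms that produce such entries; the lock names, the sleep, and the pass bodies are illustrative stand-ins only.

import time

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def update_usage():
    # Runs with the named internal semaphore held; lockutils emits the
    # DEBUG "acquired ... waited" / "released ... held" lines around it.
    time.sleep(0.1)  # stand-in for resource-tracker bookkeeping


def refresh_cache(instance_uuid):
    # Context-manager form, matching the per-instance
    # "refresh_cache-<uuid>" locks seen in the log.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # stand-in for the Neutron info-cache refresh


if __name__ == "__main__":
    update_usage()
    refresh_cache("8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270")
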
[ 1012.293190] env[62383]: DEBUG oslo_concurrency.lockutils [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.157s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.293438] env[62383]: DEBUG nova.objects.instance [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lazy-loading 'resources' on Instance uuid 688b0afd-a6e1-4c3f-999d-5975371e888e {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1012.344297] env[62383]: DEBUG nova.network.neutron [-] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.378648] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5230e8f8-2a2e-5046-6d41-dd40b7df8452, 'name': SearchDatastore_Task, 'duration_secs': 0.049384} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.380794] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1012.381139] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270/8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.384023] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f664181-36ca-4865-a9be-f2368a5b7ec6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.391601] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for the task: (returnval){ [ 1012.391601] env[62383]: value = "task-2452178" [ 1012.391601] env[62383]: _type = "Task" [ 1012.391601] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.400139] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452178, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.402521] env[62383]: DEBUG nova.compute.manager [req-229144d5-48d2-4f54-96c0-bd0ff3e4e372 req-10f85157-5e39-4926-aa28-8af2ea3c4337 service nova] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Received event network-vif-deleted-b34b897a-3f37-4846-a7e9-0c248d1ecaf9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1012.402859] env[62383]: DEBUG nova.compute.manager [req-229144d5-48d2-4f54-96c0-bd0ff3e4e372 req-10f85157-5e39-4926-aa28-8af2ea3c4337 service nova] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Received event network-vif-deleted-3d90ef19-0bb2-425b-929c-29a31ceac068 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1012.651795] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "8f639983-e7ef-4a63-94b6-5c5256015937" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.652103] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "8f639983-e7ef-4a63-94b6-5c5256015937" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.705241] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.733571] env[62383]: DEBUG nova.compute.manager [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1012.800396] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4fda4420-0c7b-4f43-a5ab-e7a66257fcf5 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 27.826s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.801728] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.855s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.801728] env[62383]: INFO nova.compute.manager [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Unshelving [ 1012.808715] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.847882] env[62383]: INFO nova.compute.manager [-] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Took 1.35 seconds to deallocate network for instance. [ 1012.912376] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452178, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.023069] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "690dca62-cafb-40f7-92f0-9bbfde3467b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.023122] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "690dca62-cafb-40f7-92f0-9bbfde3467b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.156023] env[62383]: DEBUG nova.compute.manager [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1013.170925] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91d209b-f2ff-4bab-84a5-5fae15904688 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.180162] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2aad1e2-2ec3-4f40-ae31-13aba1222dde {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.215300] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3292305b-eef4-4fcd-81e8-795a3ee86322 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.226920] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0544a053-15fb-4c9b-9cf7-740655eb9737 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.231692] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.247255] env[62383]: DEBUG nova.compute.provider_tree [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.264575] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.356217] env[62383]: DEBUG oslo_concurrency.lockutils [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.402204] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452178, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542763} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.402605] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore1] 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270/8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.402710] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.403543] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25bbbeff-c509-4dab-8700-51e7e8169aa5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.412963] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for the task: (returnval){ [ 1013.412963] env[62383]: value = "task-2452180" [ 1013.412963] env[62383]: _type = "Task" [ 1013.412963] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.423908] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452180, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.527173] env[62383]: DEBUG nova.compute.manager [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1013.680113] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.721138] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.752684] env[62383]: DEBUG nova.scheduler.client.report [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.835160] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.923221] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452180, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128666} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.923554] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1013.924409] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4427ed43-c9a0-493e-a70e-c9a17da908fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.946975] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270/8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1013.947231] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1cf02e6d-965e-4ffd-921f-fcbe82232f33 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.966466] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for the task: (returnval){ [ 1013.966466] env[62383]: value = "task-2452181" [ 1013.966466] env[62383]: _type = "Task" [ 1013.966466] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.974186] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452181, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.048545] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.222082] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.258081] env[62383]: DEBUG oslo_concurrency.lockutils [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.965s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.260425] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 4.956s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.278201] env[62383]: INFO nova.scheduler.client.report [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Deleted allocations for instance 688b0afd-a6e1-4c3f-999d-5975371e888e [ 1014.478340] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452181, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.728054] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.762904] env[62383]: DEBUG nova.objects.instance [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'migration_context' on Instance uuid eedc7859-3882-4837-9419-f9edce5f12fa {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.786842] env[62383]: DEBUG oslo_concurrency.lockutils [None req-412de508-d10b-4d2c-959a-b336d7d7821c tempest-ServerRescueTestJSONUnderV235-1720060412 tempest-ServerRescueTestJSONUnderV235-1720060412-project-member] Lock "688b0afd-a6e1-4c3f-999d-5975371e888e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.223s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.978455] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452181, 'name': ReconfigVM_Task, 'duration_secs': 0.720363} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.978793] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270/8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.979591] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9283411b-65c5-4dce-9d86-03777e45fea3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.987041] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for the task: (returnval){ [ 1014.987041] env[62383]: value = "task-2452182" [ 1014.987041] env[62383]: _type = "Task" [ 1014.987041] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.000602] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452182, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.225150] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.301651] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Volume attach. Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1015.301889] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496568', 'volume_id': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'name': 'volume-c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4d929f43-cea2-41a0-9822-180a2647be2c', 'attached_at': '', 'detached_at': '', 'volume_id': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'serial': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1015.302778] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd57644-2787-49e7-8abf-4ae9eb303fe0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.324186] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9beca01-6407-495c-8ce5-c05c819b1cb9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.351391] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] volume-c744bbf6-ff18-4c9f-8f11-c14e62047e05/volume-c744bbf6-ff18-4c9f-8f11-c14e62047e05.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.352819] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d5bcc00-a4f9-469f-8626-773ad1785780 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.371772] env[62383]: DEBUG oslo_vmware.api [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1015.371772] env[62383]: value = "task-2452183" [ 1015.371772] env[62383]: _type = "Task" [ 1015.371772] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.381329] env[62383]: DEBUG oslo_vmware.api [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452183, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.506544] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452182, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.566321] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c53592a-269a-4328-b93d-8ea6fe5e3deb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.573796] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa027e8-840c-40fe-bd60-56fa817f31b9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.612555] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6059d4-74b0-44f8-b143-c0ee180ff34b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.621353] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f91c7c7-ae41-4247-8ec5-0b254fef0324 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.635691] env[62383]: DEBUG nova.compute.provider_tree [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.727182] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.881763] env[62383]: DEBUG oslo_vmware.api [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452183, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.999761] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452182, 'name': Rename_Task, 'duration_secs': 0.789334} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.000451] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1016.000659] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d133faea-05f6-41ad-b4fa-13ad61a2d7b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.007259] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for the task: (returnval){ [ 1016.007259] env[62383]: value = "task-2452184" [ 1016.007259] env[62383]: _type = "Task" [ 1016.007259] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.014923] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452184, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.139179] env[62383]: DEBUG nova.scheduler.client.report [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1016.224904] env[62383]: DEBUG oslo_vmware.api [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452169, 'name': ReconfigVM_Task, 'duration_secs': 6.841969} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.225196] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.225478] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Reconfigured VM to detach interface {{(pid=62383) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1016.383540] env[62383]: DEBUG oslo_vmware.api [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452183, 'name': ReconfigVM_Task, 'duration_secs': 0.909093} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.383861] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Reconfigured VM instance instance-00000061 to attach disk [datastore2] volume-c744bbf6-ff18-4c9f-8f11-c14e62047e05/volume-c744bbf6-ff18-4c9f-8f11-c14e62047e05.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1016.388646] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0db79308-e01f-42f3-b89e-bc44820de6f6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.403678] env[62383]: DEBUG oslo_vmware.api [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1016.403678] env[62383]: value = "task-2452185" [ 1016.403678] env[62383]: _type = "Task" [ 1016.403678] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.413576] env[62383]: DEBUG oslo_vmware.api [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452185, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.482284] env[62383]: DEBUG nova.compute.manager [req-80909db1-d07a-43c6-91be-83e3c8b62a2f req-f597e510-bfd6-4b65-9257-06d1a4f18ad3 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Received event network-vif-deleted-fe300b59-d7d9-40d6-b3a4-feabee4c56a3 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1016.482443] env[62383]: INFO nova.compute.manager [req-80909db1-d07a-43c6-91be-83e3c8b62a2f req-f597e510-bfd6-4b65-9257-06d1a4f18ad3 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Neutron deleted interface fe300b59-d7d9-40d6-b3a4-feabee4c56a3; detaching it from the instance and deleting it from the info cache [ 1016.482746] env[62383]: DEBUG nova.network.neutron [req-80909db1-d07a-43c6-91be-83e3c8b62a2f req-f597e510-bfd6-4b65-9257-06d1a4f18ad3 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updating instance_info_cache with network_info: [{"id": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "address": "fa:16:3e:ec:51:f4", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape527ebe3-bc", "ovs_interfaceid": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.519474] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452184, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.914150] env[62383]: DEBUG oslo_vmware.api [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452185, 'name': ReconfigVM_Task, 'duration_secs': 0.187219} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.914458] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496568', 'volume_id': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'name': 'volume-c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4d929f43-cea2-41a0-9822-180a2647be2c', 'attached_at': '', 'detached_at': '', 'volume_id': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'serial': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1016.985392] env[62383]: DEBUG oslo_concurrency.lockutils [req-80909db1-d07a-43c6-91be-83e3c8b62a2f req-f597e510-bfd6-4b65-9257-06d1a4f18ad3 service nova] Acquiring lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1016.985575] env[62383]: DEBUG oslo_concurrency.lockutils [req-80909db1-d07a-43c6-91be-83e3c8b62a2f req-f597e510-bfd6-4b65-9257-06d1a4f18ad3 service nova] Acquired lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.986545] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685f0f89-16f1-4174-a438-f1d5c625ae9b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.003669] env[62383]: DEBUG oslo_concurrency.lockutils [req-80909db1-d07a-43c6-91be-83e3c8b62a2f req-f597e510-bfd6-4b65-9257-06d1a4f18ad3 service nova] Releasing lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1017.003940] env[62383]: WARNING nova.compute.manager [req-80909db1-d07a-43c6-91be-83e3c8b62a2f req-f597e510-bfd6-4b65-9257-06d1a4f18ad3 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Detach interface failed, port_id=fe300b59-d7d9-40d6-b3a4-feabee4c56a3, reason: No device with interface-id fe300b59-d7d9-40d6-b3a4-feabee4c56a3 exists on VM: nova.exception.NotFound: No device with interface-id fe300b59-d7d9-40d6-b3a4-feabee4c56a3 exists on VM [ 1017.016243] env[62383]: DEBUG oslo_vmware.api [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452184, 'name': PowerOnVM_Task, 'duration_secs': 0.532062} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.016448] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1017.016642] env[62383]: INFO nova.compute.manager [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Took 9.38 seconds to spawn the instance on the hypervisor. [ 1017.016817] env[62383]: DEBUG nova.compute.manager [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1017.017549] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e78161e-f532-409a-96b3-a8f45124a112 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.151072] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.891s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.159605] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.351s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.159835] env[62383]: DEBUG nova.objects.instance [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lazy-loading 'resources' on Instance uuid eb632e2d-b71e-446d-83a2-0bab1d823d27 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.462363] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.462602] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.462736] env[62383]: DEBUG nova.network.neutron [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 
tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1017.534860] env[62383]: INFO nova.compute.manager [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Took 25.62 seconds to build instance. [ 1017.906863] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.907166] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.907385] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.907568] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.907738] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.909773] env[62383]: INFO nova.compute.manager [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Terminating instance [ 1017.948450] env[62383]: DEBUG nova.objects.instance [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lazy-loading 'flavor' on Instance uuid 4d929f43-cea2-41a0-9822-180a2647be2c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.963667] env[62383]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16380a55-be28-4ec6-a880-69d447f0d79c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.973460] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bc02ba-77ba-414b-b888-f9d5170beb85 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.007028] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e677e77-e071-4463-9b6f-3b774706b582 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.014496] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c032c985-4ddc-4663-bed6-e4358aa601c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.029322] env[62383]: DEBUG nova.compute.provider_tree [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.036427] env[62383]: DEBUG oslo_concurrency.lockutils [None req-673bbf52-f801-4931-9b85-d787a800b69f tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.127s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.204994] env[62383]: DEBUG nova.network.neutron [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updating instance_info_cache with network_info: [{"id": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "address": "fa:16:3e:ec:51:f4", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape527ebe3-bc", "ovs_interfaceid": "e527ebe3-bc3d-4e96-8325-891e543bdb39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1018.416374] env[62383]: DEBUG nova.compute.manager [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1018.416645] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1018.417570] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb547a0-a057-4713-a242-c9285c51c569 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.425178] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.425410] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bc7332e4-1da5-4e5f-a95f-663237ee7ae9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.431334] env[62383]: DEBUG oslo_vmware.api [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1018.431334] env[62383]: value = "task-2452186" [ 1018.431334] env[62383]: _type = "Task" [ 1018.431334] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.438897] env[62383]: DEBUG oslo_vmware.api [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452186, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.453917] env[62383]: DEBUG oslo_concurrency.lockutils [None req-02c26b5c-c671-47cb-bb81-d84ab4c6b22c tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4d929f43-cea2-41a0-9822-180a2647be2c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.284s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.532276] env[62383]: DEBUG nova.scheduler.client.report [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1018.655217] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "4d929f43-cea2-41a0-9822-180a2647be2c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.659125] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4d929f43-cea2-41a0-9822-180a2647be2c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.659125] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "4d929f43-cea2-41a0-9822-180a2647be2c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.659125] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4d929f43-cea2-41a0-9822-180a2647be2c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.659125] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4d929f43-cea2-41a0-9822-180a2647be2c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.659337] env[62383]: INFO nova.compute.manager [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Terminating instance [ 1018.696385] env[62383]: INFO nova.compute.manager [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Swapping old allocation on dict_keys(['60615f54-0557-436e-a486-87505bffb4c7']) held by migration 512419a1-769c-4f8b-bdc6-fd406dd76c47 for instance [ 1018.707123] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-4cd9c7be-c5f4-460b-a9e2-e8f778076947" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.722761] env[62383]: DEBUG nova.scheduler.client.report [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Overwriting current allocation {'allocations': {'60615f54-0557-436e-a486-87505bffb4c7': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 140}}, 'project_id': '2439f3d802f34027b12d50f242a54ba3', 'user_id': 'cce49ef14f3a474c9448607425da3dc3', 'consumer_generation': 1} on consumer eedc7859-3882-4837-9419-f9edce5f12fa {{(pid=62383) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1018.797684] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.797684] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.797825] env[62383]: DEBUG nova.network.neutron [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1018.941709] env[62383]: DEBUG oslo_vmware.api [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452186, 'name': PowerOffVM_Task, 'duration_secs': 0.179056} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.942334] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1018.942334] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1018.942470] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31b8fc32-3f81-4cd5-a705-202cd406ba3b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.999268] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1018.999567] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.005967] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1019.006206] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1019.006389] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleting the datastore file [datastore2] 4cd9c7be-c5f4-460b-a9e2-e8f778076947 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1019.006643] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e6b03c1-6b3c-41c3-a99e-3925bcce7cce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.015713] env[62383]: DEBUG oslo_vmware.api [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1019.015713] env[62383]: value = "task-2452188" [ 1019.015713] env[62383]: _type = "Task" [ 1019.015713] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.024746] env[62383]: DEBUG oslo_vmware.api [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452188, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.040347] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.042565] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.778s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.044019] env[62383]: INFO nova.compute.claims [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.064094] env[62383]: INFO nova.scheduler.client.report [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleted allocations for instance eb632e2d-b71e-446d-83a2-0bab1d823d27 [ 1019.166021] env[62383]: DEBUG nova.compute.manager [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1019.166021] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.166021] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba8067e5-6058-4b4c-a938-858892ab6624 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.173165] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1019.173165] env[62383]: value = "task-2452189" [ 1019.173165] env[62383]: _type = "Task" [ 1019.173165] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.182981] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452189, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.213371] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8c635e6e-7dcb-43c1-9617-ecabf7bb72b6 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-4cd9c7be-c5f4-460b-a9e2-e8f778076947-fe300b59-d7d9-40d6-b3a4-feabee4c56a3" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.613s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.510467] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.510467] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1019.517093] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquiring lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.517406] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.517626] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquiring lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.517829] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.517999] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 
tempest-ServerMetadataTestJSON-1220543904-project-member] Lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.525377] env[62383]: INFO nova.compute.manager [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Terminating instance [ 1019.532282] env[62383]: DEBUG nova.network.neutron [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance_info_cache with network_info: [{"id": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "address": "fa:16:3e:7f:55:d3", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ba23e44-2c", "ovs_interfaceid": "6ba23e44-2c77-442d-9aee-5a75d8abab68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.558420] env[62383]: DEBUG oslo_vmware.api [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452188, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157379} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.560033] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1019.560158] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1019.560338] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1019.560514] env[62383]: INFO nova.compute.manager [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1019.560762] env[62383]: DEBUG oslo.service.loopingcall [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1019.561205] env[62383]: DEBUG nova.compute.manager [-] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1019.561329] env[62383]: DEBUG nova.network.neutron [-] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1019.571252] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ea84dac-d51c-46d9-9f94-123225b81267 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "eb632e2d-b71e-446d-83a2-0bab1d823d27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.870s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.685159] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452189, 'name': PowerOffVM_Task, 'duration_secs': 0.182491} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.685454] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1019.685628] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1019.685814] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496568', 'volume_id': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'name': 'volume-c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4d929f43-cea2-41a0-9822-180a2647be2c', 'attached_at': '', 'detached_at': '', 'volume_id': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'serial': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1019.686678] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c0c24b-f3bb-413f-8876-987434c189a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.709215] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676c0a85-bef1-4757-a80e-c46054f0015d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.716676] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3d3ac6-ec3d-49c6-86d9-b7b63ecfed13 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.738652] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b046ae98-f9c3-4ec9-995d-eea5665d93c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.752296] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] The volume has not been displaced from its original location: [datastore2] volume-c744bbf6-ff18-4c9f-8f11-c14e62047e05/volume-c744bbf6-ff18-4c9f-8f11-c14e62047e05.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1019.757495] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Reconfiguring VM instance instance-00000061 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1019.757773] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-023c1111-a571-4123-a690-032920a1e5f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.776235] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1019.776235] env[62383]: value = "task-2452190" [ 1019.776235] env[62383]: _type = "Task" [ 1019.776235] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.785563] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452190, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.045648] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-eedc7859-3882-4837-9419-f9edce5f12fa" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.045648] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758bfa83-81de-46f2-bbe0-f7d13d729a88 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.049937] env[62383]: DEBUG nova.compute.manager [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1020.051624] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.053164] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0983d1-e9f6-4ad2-bb89-d0affe627c90 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.080059] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92ef2e1-7ec8-4447-b1b3-08654fdfc9b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.087093] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1020.087093] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-078784d3-eab5-4a93-91a9-7f1f4df09034 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.094022] env[62383]: DEBUG oslo_vmware.api [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for the task: (returnval){ [ 1020.094022] env[62383]: value = "task-2452191" [ 1020.094022] env[62383]: _type = "Task" [ 1020.094022] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.109594] env[62383]: DEBUG oslo_vmware.api [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452191, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.290748] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452190, 'name': ReconfigVM_Task, 'duration_secs': 0.450352} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.291464] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Reconfigured VM instance instance-00000061 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1020.301336] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-406b9706-a6ab-4551-89d3-d59ca44ff115 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.321148] env[62383]: DEBUG nova.compute.manager [req-0d68cea3-5812-4e5c-9660-55c5a4e7a2bd req-78fe8b09-103a-42f2-9a1e-ad91272c9a95 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Received event network-vif-deleted-e527ebe3-bc3d-4e96-8325-891e543bdb39 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1020.321494] env[62383]: INFO nova.compute.manager [req-0d68cea3-5812-4e5c-9660-55c5a4e7a2bd req-78fe8b09-103a-42f2-9a1e-ad91272c9a95 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Neutron deleted interface e527ebe3-bc3d-4e96-8325-891e543bdb39; detaching it from the instance and deleting it from the info cache [ 1020.321789] env[62383]: DEBUG nova.network.neutron [req-0d68cea3-5812-4e5c-9660-55c5a4e7a2bd req-78fe8b09-103a-42f2-9a1e-ad91272c9a95 service nova] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.324321] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1020.324321] env[62383]: value = "task-2452192" [ 1020.324321] env[62383]: _type = "Task" [ 1020.324321] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.335545] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452192, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.374145] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5c995a-a63e-4924-b59f-ff3628a289c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.380701] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fdb0cb-3fe6-45ac-838f-216bd19e295c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.422338] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f364276e-0d6d-48aa-ac8d-3e6695a0f7c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.430850] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95ef144-b6bf-4a69-8e3e-f386a4139231 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.446499] env[62383]: DEBUG nova.compute.provider_tree [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.603631] env[62383]: DEBUG oslo_vmware.api [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452191, 'name': PowerOffVM_Task, 'duration_secs': 0.19629} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.603921] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.604103] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.604373] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67791bbd-5ee0-4290-a532-b0f9a16e10d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.665301] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.665301] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.665301] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Deleting the datastore file [datastore1] 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.665609] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be14f2cf-4a77-4f69-9377-4c052b3019b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.677293] env[62383]: DEBUG oslo_vmware.api [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for the task: (returnval){ [ 1020.677293] env[62383]: value = "task-2452194" [ 1020.677293] env[62383]: _type = "Task" [ 1020.677293] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.688639] env[62383]: DEBUG oslo_vmware.api [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452194, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.750574] env[62383]: DEBUG nova.network.neutron [-] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.826184] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87f2750f-5fcd-46da-ac15-47b0cf456f27 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.837272] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452192, 'name': ReconfigVM_Task, 'duration_secs': 0.140969} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.838462] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496568', 'volume_id': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'name': 'volume-c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4d929f43-cea2-41a0-9822-180a2647be2c', 'attached_at': '', 'detached_at': '', 'volume_id': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05', 'serial': 'c744bbf6-ff18-4c9f-8f11-c14e62047e05'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1020.838757] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.839587] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605718fe-e46a-425b-9578-3e82d63c6308 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.844520] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85c43cf-40dd-4e1f-a075-98e87625ace5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.859739] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.859739] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3b33b3d-a41a-4063-ba53-76c7149de8e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.875885] env[62383]: DEBUG nova.compute.manager [req-0d68cea3-5812-4e5c-9660-55c5a4e7a2bd req-78fe8b09-103a-42f2-9a1e-ad91272c9a95 service nova] [instance: 
4cd9c7be-c5f4-460b-a9e2-e8f778076947] Detach interface failed, port_id=e527ebe3-bc3d-4e96-8325-891e543bdb39, reason: Instance 4cd9c7be-c5f4-460b-a9e2-e8f778076947 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1020.917824] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.918055] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.918271] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleting the datastore file [datastore2] 4d929f43-cea2-41a0-9822-180a2647be2c {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.918562] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae9b526f-4113-4ec4-872a-3705ab28c67b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.925087] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1020.925087] env[62383]: value = "task-2452196" [ 1020.925087] env[62383]: _type = "Task" [ 1020.925087] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.933161] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452196, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.949168] env[62383]: DEBUG nova.scheduler.client.report [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1021.172862] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.173087] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.180335] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.183957] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f157ec94-ee55-42b8-a8bf-40a9cdf1eb47 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.191772] env[62383]: DEBUG oslo_vmware.api [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Task: {'id': task-2452194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13959} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.193029] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1021.193242] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1021.193449] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1021.193672] env[62383]: INFO nova.compute.manager [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1021.193911] env[62383]: DEBUG oslo.service.loopingcall [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1021.194172] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1021.194172] env[62383]: value = "task-2452197" [ 1021.194172] env[62383]: _type = "Task" [ 1021.194172] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.194359] env[62383]: DEBUG nova.compute.manager [-] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1021.194454] env[62383]: DEBUG nova.network.neutron [-] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1021.205036] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452197, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.255459] env[62383]: INFO nova.compute.manager [-] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Took 1.69 seconds to deallocate network for instance. 
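The destroy path recorded above (PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task, each asynchronous step followed by the wait_for_task polling entries) reduces to a handful of oslo_vmware session calls. A minimal sketch of that sequence follows; it is illustrative only, not the Nova vmops code, and `session`, `vm_ref`, `file_manager`, `dc_ref` and `ds_path` are placeholder inputs assumed for the example.

def destroy_sequence(session, vm_ref, file_manager, dc_ref, ds_path):
    # "Powering off the VM" -> PowerOffVM_Task, polled until it completes
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
    # "Unregistering the VM" -> UnregisterVM (synchronous, no task to poll)
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # "Deleting the datastore file [datastoreN] <instance uuid>" -> DeleteDatastoreFile_Task
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)

The same invoke-then-wait pattern accounts for every "Waiting for the task: ... to complete" / "progress is N%" / "completed successfully" triple in the log.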
[ 1021.436625] env[62383]: DEBUG oslo_vmware.api [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126139} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.437164] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1021.437164] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1021.437286] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1021.437494] env[62383]: INFO nova.compute.manager [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1021.437690] env[62383]: DEBUG oslo.service.loopingcall [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1021.437885] env[62383]: DEBUG nova.compute.manager [-] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1021.437976] env[62383]: DEBUG nova.network.neutron [-] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1021.454736] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.454736] env[62383]: DEBUG nova.compute.manager [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1021.457215] env[62383]: DEBUG oslo_concurrency.lockutils [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.101s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.457440] env[62383]: DEBUG nova.objects.instance [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lazy-loading 'resources' on Instance uuid 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.676823] env[62383]: INFO nova.compute.manager [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Detaching volume 46a8babb-a07c-4277-8a54-ff1a519becfb [ 1021.705667] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452197, 'name': PowerOffVM_Task, 'duration_secs': 0.205794} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.705968] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.706629] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1021.706841] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.707121] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1021.707212] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 
tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.707329] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1021.707485] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1021.707687] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1021.707841] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1021.707999] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1021.708431] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1021.708431] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1021.714069] env[62383]: INFO nova.virt.block_device [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Attempting to driver detach volume 46a8babb-a07c-4277-8a54-ff1a519becfb from mountpoint /dev/sdb [ 1021.714303] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Volume detach. 
Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1021.714488] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496507', 'volume_id': '46a8babb-a07c-4277-8a54-ff1a519becfb', 'name': 'volume-46a8babb-a07c-4277-8a54-ff1a519becfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67f05a2b-f323-4e4a-ac13-7f4745593be0', 'attached_at': '', 'detached_at': '', 'volume_id': '46a8babb-a07c-4277-8a54-ff1a519becfb', 'serial': '46a8babb-a07c-4277-8a54-ff1a519becfb'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1021.714966] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b39371a2-2f6b-4ae8-a21c-3531073e4d48 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.725288] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425c4fca-2734-474e-936c-9602a443ce8c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.749417] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca90ddde-b169-4523-a5c6-e5d5f8d0a3c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.751948] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1021.751948] env[62383]: value = "task-2452198" [ 1021.751948] env[62383]: _type = "Task" [ 1021.751948] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.757479] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd79f3e-6609-4edc-8392-91c246871172 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.763073] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.763324] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452198, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.780749] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c947771-3a0e-4875-8768-5b04437d7171 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.794584] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] The volume has not been displaced from its original location: [datastore2] volume-46a8babb-a07c-4277-8a54-ff1a519becfb/volume-46a8babb-a07c-4277-8a54-ff1a519becfb.vmdk. No consolidation needed. {{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1021.799812] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Reconfiguring VM instance instance-0000002b to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1021.800133] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d24e2f7a-0d15-4c35-81dd-4dcf6dc672b3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.817033] env[62383]: DEBUG oslo_vmware.api [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1021.817033] env[62383]: value = "task-2452199" [ 1021.817033] env[62383]: _type = "Task" [ 1021.817033] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.824433] env[62383]: DEBUG oslo_vmware.api [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452199, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.932595] env[62383]: DEBUG nova.network.neutron [-] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.959928] env[62383]: DEBUG nova.compute.utils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1021.964057] env[62383]: DEBUG nova.compute.manager [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1021.964057] env[62383]: DEBUG nova.network.neutron [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1022.003358] env[62383]: DEBUG nova.policy [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7dd6d207b93b431895b5bb3c749cddc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '439cb1d6c59f4407921de2276ff4f0a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1022.182806] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423ca569-3748-4a2c-b3b5-8bc42bfc27d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.185668] env[62383]: DEBUG nova.network.neutron [-] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.191473] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563a3775-5424-4d97-80fd-14cef77b72c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.227355] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccd9028-611e-42c9-9ecd-89d611c473cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.235017] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed70ae3e-ea72-4e46-8e52-d1f8080feb3f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.248562] env[62383]: DEBUG nova.compute.provider_tree [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1022.260485] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452198, 'name': ReconfigVM_Task, 'duration_secs': 0.146602} completed 
successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.261337] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21aa7bde-3c8f-4ee5-bed3-fceeeee93cfd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.282262] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1022.282519] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1022.282677] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1022.282859] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1022.283009] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1022.283207] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1022.283444] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1022.283599] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1022.283761] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1022.283922] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1022.284106] env[62383]: DEBUG nova.virt.hardware [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1022.285520] env[62383]: DEBUG nova.network.neutron [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Successfully created port: c23a7aba-1afc-4edb-b3ee-d6e718b18392 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1022.287400] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cb45446-1fd8-414b-9979-a0b7c97de133 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.292898] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1022.292898] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524df7df-c432-0c70-449a-36fbe62f6b20" [ 1022.292898] env[62383]: _type = "Task" [ 1022.292898] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.300211] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524df7df-c432-0c70-449a-36fbe62f6b20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.325166] env[62383]: DEBUG oslo_vmware.api [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452199, 'name': ReconfigVM_Task, 'duration_secs': 0.360934} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.328975] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Reconfigured VM instance instance-0000002b to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1022.330201] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b3d7e7f-bdc4-4123-90d6-3dd4b4032e51 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.346030] env[62383]: DEBUG oslo_vmware.api [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1022.346030] env[62383]: value = "task-2452200" [ 1022.346030] env[62383]: _type = "Task" [ 1022.346030] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.354768] env[62383]: DEBUG oslo_vmware.api [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452200, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.357084] env[62383]: DEBUG nova.compute.manager [req-4541bd78-dd58-4fd4-b732-e7a4536bea9a req-7c5a3f6f-4cc8-4a82-a6ed-0dc828cebc8d service nova] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Received event network-vif-deleted-7543f1ad-e963-47ac-a972-6320079ef920 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1022.357287] env[62383]: DEBUG nova.compute.manager [req-4541bd78-dd58-4fd4-b732-e7a4536bea9a req-7c5a3f6f-4cc8-4a82-a6ed-0dc828cebc8d service nova] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Received event network-vif-deleted-a3f189ed-023b-4eb5-b181-dab2eff70488 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1022.435515] env[62383]: INFO nova.compute.manager [-] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Took 1.24 seconds to deallocate network for instance. [ 1022.468725] env[62383]: DEBUG nova.compute.manager [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1022.688069] env[62383]: INFO nova.compute.manager [-] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Took 1.25 seconds to deallocate network for instance. 
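The repeated "Getting desirable topologies ... Got 1 possible topologies ... [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entries above come from enumerating sockets/cores/threads factorizations of the flavor's vCPU count under the 65536 per-dimension ceilings; for the 1-vCPU m1.nano flavor only 1:1:1 survives. A small illustrative enumeration, assuming no NUMA constraints and omitting Nova's preference sorting (this is not hardware.py itself):

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # every (sockets, cores, threads) triple whose product equals the vCPU count
    topos = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % s:
            continue
        for c in range(1, min(vcpus // s, max_cores) + 1):
            if (vcpus // s) % c:
                continue
            t = vcpus // (s * c)
            if t <= max_threads:
                topos.append((s, c, t))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"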
[ 1022.770087] env[62383]: ERROR nova.scheduler.client.report [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [req-e4e9f4da-26d7-404a-a8b2-84f4fe2c3751] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e4e9f4da-26d7-404a-a8b2-84f4fe2c3751"}]} [ 1022.787037] env[62383]: DEBUG nova.scheduler.client.report [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1022.803153] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524df7df-c432-0c70-449a-36fbe62f6b20, 'name': SearchDatastore_Task, 'duration_secs': 0.009023} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.804127] env[62383]: DEBUG nova.scheduler.client.report [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1022.804381] env[62383]: DEBUG nova.compute.provider_tree [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1022.811495] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] 
Reconfiguring VM instance instance-00000048 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1022.812282] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30a80cd4-9231-49b9-9d65-1fc8868b7bfc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.825453] env[62383]: DEBUG nova.scheduler.client.report [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1022.832628] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1022.832628] env[62383]: value = "task-2452201" [ 1022.832628] env[62383]: _type = "Task" [ 1022.832628] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.840904] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452201, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.845949] env[62383]: DEBUG nova.scheduler.client.report [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1022.859284] env[62383]: DEBUG oslo_vmware.api [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452200, 'name': ReconfigVM_Task, 'duration_secs': 0.239202} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.859634] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496507', 'volume_id': '46a8babb-a07c-4277-8a54-ff1a519becfb', 'name': 'volume-46a8babb-a07c-4277-8a54-ff1a519becfb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67f05a2b-f323-4e4a-ac13-7f4745593be0', 'attached_at': '', 'detached_at': '', 'volume_id': '46a8babb-a07c-4277-8a54-ff1a519becfb', 'serial': '46a8babb-a07c-4277-8a54-ff1a519becfb'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1022.943693] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.061419] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc0f36e-528f-4239-bdbb-4b9f118c5154 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.068982] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b633b574-ca41-4d2a-8e7b-4d8da44ecd64 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.098164] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d844be-3472-40e4-bd59-cf6b8f7eb2f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.105285] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73960b7a-b2cc-49e8-aef2-c9d4f4ec622e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.119922] env[62383]: DEBUG nova.compute.provider_tree [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1023.234983] env[62383]: INFO nova.compute.manager [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Took 0.55 seconds to detach 1 volumes for instance. 
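The 409 "placement.concurrent_update" seen above is Placement's optimistic-concurrency check: the inventory PUT carried a stale resource_provider_generation, so the report client re-reads the provider's inventories, aggregates and traits and retries, after which the generation advances (141 to 142 in the later entries). A minimal sketch of that retry loop against the Placement HTTP API, using the requests library; the base URL, token and retry count are assumptions for illustration, and the real report client goes through a keystoneauth session with fuller error handling:

import requests

PLACEMENT = "http://placement.example/placement"            # assumed endpoint
HEADERS = {"X-Auth-Token": "<token>",                        # assumed auth
           "OpenStack-API-Version": "placement 1.26"}

def put_inventory(rp_uuid, inventories, retries=3):
    for _ in range(retries):
        # read the provider's current generation
        rp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                          headers=HEADERS).json()
        body = {"resource_provider_generation": rp["generation"],
                "inventories": inventories}
        resp = requests.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                            json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the generation;
        # loop to re-read the provider and try again
    raise RuntimeError("inventory update kept conflicting")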
[ 1023.343108] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452201, 'name': ReconfigVM_Task, 'duration_secs': 0.203362} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.343391] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfigured VM instance instance-00000048 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1023.344163] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ecf98d-62e9-4619-be17-50187e85317c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.369850] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] eedc7859-3882-4837-9419-f9edce5f12fa/eedc7859-3882-4837-9419-f9edce5f12fa.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.370236] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41e4063e-4f41-4b92-be21-687dbf989196 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.390955] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1023.390955] env[62383]: value = "task-2452202" [ 1023.390955] env[62383]: _type = "Task" [ 1023.390955] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.399035] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452202, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.400764] env[62383]: DEBUG nova.objects.instance [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lazy-loading 'flavor' on Instance uuid 67f05a2b-f323-4e4a-ac13-7f4745593be0 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.482041] env[62383]: DEBUG nova.compute.manager [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1023.508159] env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1023.508407] env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.508568] env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1023.508748] env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.508894] env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1023.509088] env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1023.509318] env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1023.509482] env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1023.509650] 
env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1023.509814] env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1023.510019] env[62383]: DEBUG nova.virt.hardware [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1023.510880] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a74fbb-003e-4edf-8fee-686122e28505 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.518323] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f3bbf8-dc5c-445c-a9c1-5dd0cd9562af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.653491] env[62383]: DEBUG nova.scheduler.client.report [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 141 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1023.653610] env[62383]: DEBUG nova.compute.provider_tree [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 141 to 142 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1023.654275] env[62383]: DEBUG nova.compute.provider_tree [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1023.742303] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.785357] env[62383]: DEBUG nova.network.neutron [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Successfully updated port: c23a7aba-1afc-4edb-b3ee-d6e718b18392 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1023.901130] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452202, 'name': ReconfigVM_Task, 'duration_secs': 0.285044} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.901404] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfigured VM instance instance-00000048 to attach disk [datastore2] eedc7859-3882-4837-9419-f9edce5f12fa/eedc7859-3882-4837-9419-f9edce5f12fa.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1023.902241] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ca0694-c563-428f-9279-123be4a4c939 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.927136] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9391c685-b866-4bd5-bbfd-64f792256bdc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.948292] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c17ba49-8279-424b-8e2b-14f9f801071a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.969325] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed5dfd3-80ba-4d3f-8e50-2754123d318a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.975852] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1023.976112] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6911cdcf-c373-420c-84d1-ae37acbf6319 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.981895] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 
tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1023.981895] env[62383]: value = "task-2452203" [ 1023.981895] env[62383]: _type = "Task" [ 1023.981895] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.989094] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452203, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.162273] env[62383]: DEBUG oslo_concurrency.lockutils [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.705s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.165037] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.485s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.166538] env[62383]: INFO nova.compute.claims [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1024.183932] env[62383]: INFO nova.scheduler.client.report [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted allocations for instance 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3 [ 1024.289504] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "refresh_cache-20861554-890b-4ad3-a73f-0c825a79bbf1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.289775] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired lock "refresh_cache-20861554-890b-4ad3-a73f-0c825a79bbf1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.290123] env[62383]: DEBUG nova.network.neutron [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1024.383420] env[62383]: DEBUG nova.compute.manager [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Received event network-vif-plugged-c23a7aba-1afc-4edb-b3ee-d6e718b18392 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1024.383420] env[62383]: DEBUG oslo_concurrency.lockutils [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] Acquiring lock "20861554-890b-4ad3-a73f-0c825a79bbf1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.383601] env[62383]: DEBUG oslo_concurrency.lockutils [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] Lock "20861554-890b-4ad3-a73f-0c825a79bbf1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.383757] env[62383]: DEBUG oslo_concurrency.lockutils [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] Lock "20861554-890b-4ad3-a73f-0c825a79bbf1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.383924] env[62383]: DEBUG nova.compute.manager [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] No waiting events found dispatching network-vif-plugged-c23a7aba-1afc-4edb-b3ee-d6e718b18392 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1024.384109] env[62383]: WARNING nova.compute.manager [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Received unexpected event network-vif-plugged-c23a7aba-1afc-4edb-b3ee-d6e718b18392 for instance with vm_state building and task_state spawning. [ 1024.384273] env[62383]: DEBUG nova.compute.manager [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Received event network-changed-c23a7aba-1afc-4edb-b3ee-d6e718b18392 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1024.384422] env[62383]: DEBUG nova.compute.manager [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Refreshing instance network info cache due to event network-changed-c23a7aba-1afc-4edb-b3ee-d6e718b18392. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1024.384582] env[62383]: DEBUG oslo_concurrency.lockutils [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] Acquiring lock "refresh_cache-20861554-890b-4ad3-a73f-0c825a79bbf1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.409375] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a0c117af-6268-4021-a108-09b6b25bc57c tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.236s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.494150] env[62383]: DEBUG oslo_vmware.api [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452203, 'name': PowerOnVM_Task, 'duration_secs': 0.37318} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.494464] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.254503] env[62383]: DEBUG oslo_concurrency.lockutils [None req-360a36e7-bc98-4633-a42f-6b1cba7a0050 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "1919c4ae-0e30-42bf-b851-2e6c24ab1ae3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.878s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.295365] env[62383]: DEBUG nova.network.neutron [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1025.422881] env[62383]: DEBUG nova.network.neutron [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Updating instance_info_cache with network_info: [{"id": "c23a7aba-1afc-4edb-b3ee-d6e718b18392", "address": "fa:16:3e:94:33:9c", "network": {"id": "81a92028-949b-47ea-a5fa-c0dbfd5e5571", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1764143122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "439cb1d6c59f4407921de2276ff4f0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23a7aba-1a", "ovs_interfaceid": "c23a7aba-1afc-4edb-b3ee-d6e718b18392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.766048] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.766302] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.766525] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "67f05a2b-f323-4e4a-ac13-7f4745593be0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.766710] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.766893] 
env[62383]: DEBUG oslo_concurrency.lockutils [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.771826] env[62383]: INFO nova.compute.manager [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Terminating instance [ 1025.806078] env[62383]: INFO nova.compute.manager [None req-7f24fab0-3f2e-4fe4-9236-2797937e4d69 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance to original state: 'active' [ 1025.840192] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.840468] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.840677] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.840872] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.841057] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.845536] env[62383]: INFO nova.compute.manager [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Terminating instance [ 1025.926314] env[62383]: DEBUG 
oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Releasing lock "refresh_cache-20861554-890b-4ad3-a73f-0c825a79bbf1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.926314] env[62383]: DEBUG nova.compute.manager [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Instance network_info: |[{"id": "c23a7aba-1afc-4edb-b3ee-d6e718b18392", "address": "fa:16:3e:94:33:9c", "network": {"id": "81a92028-949b-47ea-a5fa-c0dbfd5e5571", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1764143122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "439cb1d6c59f4407921de2276ff4f0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23a7aba-1a", "ovs_interfaceid": "c23a7aba-1afc-4edb-b3ee-d6e718b18392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1025.928279] env[62383]: DEBUG oslo_concurrency.lockutils [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] Acquired lock "refresh_cache-20861554-890b-4ad3-a73f-0c825a79bbf1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.928472] env[62383]: DEBUG nova.network.neutron [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Refreshing network info cache for port c23a7aba-1afc-4edb-b3ee-d6e718b18392 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1025.929646] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:33:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c23a7aba-1afc-4edb-b3ee-d6e718b18392', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1025.936879] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Creating folder: Project (439cb1d6c59f4407921de2276ff4f0a1). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1025.938032] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ed038bf-414d-476d-bc93-0eb39a88359a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.949883] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Created folder: Project (439cb1d6c59f4407921de2276ff4f0a1) in parent group-v496304. [ 1025.950081] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Creating folder: Instances. Parent ref: group-v496569. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1025.950324] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d252ddde-d459-4c84-8e9f-59da474760b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.959996] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1da8b8-17da-4314-9a03-f248f515759b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.962819] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Created folder: Instances in parent group-v496569. [ 1025.963055] env[62383]: DEBUG oslo.service.loopingcall [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1025.963563] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1025.963782] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e2d9c79-ee6c-491c-8d0a-6cba000c2bdb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.980506] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4d8753-3d57-4ce0-9ffe-1c542e377323 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.985550] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1025.985550] env[62383]: value = "task-2452206" [ 1025.985550] env[62383]: _type = "Task" [ 1025.985550] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.014353] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da6ce22-0c23-46e7-abb7-8f6991cf81d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.019685] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452206, 'name': CreateVM_Task} progress is 15%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.024272] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad04b0d5-5e4a-4543-a642-393d77867b03 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.039872] env[62383]: DEBUG nova.compute.provider_tree [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.265745] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Didn't find any instances for network info cache update. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1026.266039] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.266205] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.266314] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.266466] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.266607] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.266750] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.266919] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1026.267127] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.275450] env[62383]: DEBUG nova.compute.manager [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1026.275664] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1026.276557] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65475de4-902d-4000-929f-f067f8a1c4d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.285426] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1026.285704] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0380cdf4-1f66-4402-8a3a-9212d0d570d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.291080] env[62383]: DEBUG oslo_vmware.api [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1026.291080] env[62383]: value = "task-2452207" [ 1026.291080] env[62383]: _type = "Task" [ 1026.291080] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.302822] env[62383]: DEBUG oslo_vmware.api [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452207, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.350269] env[62383]: DEBUG nova.compute.manager [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1026.350269] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1026.351154] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67559aab-a96c-4965-886f-73a022889de1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.361070] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1026.361070] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-053e7d79-6c24-4f97-b5e8-08306216712c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.367085] env[62383]: DEBUG oslo_vmware.api [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 1026.367085] env[62383]: value = "task-2452208" [ 1026.367085] env[62383]: _type = "Task" [ 1026.367085] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.378609] env[62383]: DEBUG oslo_vmware.api [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452208, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.495550] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452206, 'name': CreateVM_Task, 'duration_secs': 0.285647} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.495953] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1026.496526] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.496696] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.497036] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1026.497405] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c31c34a-bdfb-48cd-ba97-f17dcbb38648 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.502783] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1026.502783] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520a9da1-2928-2e76-a497-8868c6e9aa51" [ 1026.502783] env[62383]: _type = "Task" [ 1026.502783] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.512217] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520a9da1-2928-2e76-a497-8868c6e9aa51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.545994] env[62383]: DEBUG nova.scheduler.client.report [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1026.647633] env[62383]: DEBUG nova.network.neutron [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Updated VIF entry in instance network info cache for port c23a7aba-1afc-4edb-b3ee-d6e718b18392. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1026.647991] env[62383]: DEBUG nova.network.neutron [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Updating instance_info_cache with network_info: [{"id": "c23a7aba-1afc-4edb-b3ee-d6e718b18392", "address": "fa:16:3e:94:33:9c", "network": {"id": "81a92028-949b-47ea-a5fa-c0dbfd5e5571", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1764143122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "439cb1d6c59f4407921de2276ff4f0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23a7aba-1a", "ovs_interfaceid": "c23a7aba-1afc-4edb-b3ee-d6e718b18392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.770235] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.802044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "eedc7859-3882-4837-9419-f9edce5f12fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
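[editor's note] The lockutils entries throughout this section ("Acquiring lock ... by ...", "acquired ... :: waited Ns", ""released" ... :: held Ns") trace serialized access to shared state such as "compute_resources". The sketch below is illustrative only and assumes an in-process lock registry; it is not the oslo.concurrency implementation, but it reproduces the same acquire/wait/hold reporting shape around a decorated function.

    import functools
    import threading
    import time

    _locks = {}  # assumed in-process registry keyed by lock name

    def synchronized(name):
        lock = _locks.setdefault(name, threading.Lock())
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                target = f"{func.__module__}.{func.__qualname__}"
                print(f'Acquiring lock "{name}" by "{target}"')
                t0 = time.monotonic()
                with lock:
                    waited = time.monotonic() - t0
                    print(f'Lock "{name}" acquired by "{target}" :: waited {waited:.3f}s')
                    t1 = time.monotonic()
                    try:
                        return func(*args, **kwargs)
                    finally:
                        # report hold time just before the lock is released
                        held = time.monotonic() - t1
                        print(f'Lock "{name}" "released" by "{target}" :: held {held:.3f}s')
            return wrapper
        return decorator
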
[ 1026.802044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.802044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "eedc7859-3882-4837-9419-f9edce5f12fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.802044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.802044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.803625] env[62383]: DEBUG oslo_vmware.api [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452207, 'name': PowerOffVM_Task, 'duration_secs': 0.225634} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.804051] env[62383]: INFO nova.compute.manager [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Terminating instance [ 1026.805335] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.805506] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1026.806567] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8789615-faa3-458f-b9c8-cd77ed6ab01b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.871923] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.872148] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.872332] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleting the datastore file [datastore2] 67f05a2b-f323-4e4a-ac13-7f4745593be0 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.872917] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07f2366d-a837-4881-8a5b-77f1c251aca4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.878075] env[62383]: DEBUG oslo_vmware.api [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452208, 'name': PowerOffVM_Task, 'duration_secs': 0.231265} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.878626] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.878801] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1026.879068] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c327c68-3690-4796-ac6f-41783fa9ee38 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.884156] env[62383]: DEBUG oslo_vmware.api [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1026.884156] env[62383]: value = "task-2452210" [ 1026.884156] env[62383]: _type = "Task" [ 1026.884156] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.891256] env[62383]: DEBUG oslo_vmware.api [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452210, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.936999] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.937259] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.937446] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleting the datastore file [datastore2] 2c93bdf1-aaf4-4e40-898a-634dc00d05e6 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.937711] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc809024-0716-47a7-b0b7-6506b605ebd5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.943592] env[62383]: DEBUG oslo_vmware.api [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for the task: (returnval){ [ 1026.943592] env[62383]: value = "task-2452212" [ 1026.943592] env[62383]: _type = "Task" [ 1026.943592] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.951672] env[62383]: DEBUG oslo_vmware.api [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452212, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.013684] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520a9da1-2928-2e76-a497-8868c6e9aa51, 'name': SearchDatastore_Task, 'duration_secs': 0.012688} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.013983] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.014257] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.014512] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.014668] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.014865] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.015143] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2398c666-1a9b-4490-975a-2bcf1b782d70 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.030978] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.031173] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.031902] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-343a64ba-5750-41fb-b3b2-eb9e05ebfc8d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.036822] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1027.036822] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]529ecf89-55ed-b270-1b04-2f6bbf40e3f4" [ 1027.036822] env[62383]: _type = "Task" [ 1027.036822] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.044808] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529ecf89-55ed-b270-1b04-2f6bbf40e3f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.050661] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.886s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.051195] env[62383]: DEBUG nova.compute.manager [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1027.053763] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.219s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.054009] env[62383]: DEBUG nova.objects.instance [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lazy-loading 'pci_requests' on Instance uuid c56464dd-63af-4686-b666-d0ac2df01ec1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.150716] env[62383]: DEBUG oslo_concurrency.lockutils [req-090cef72-8170-47f7-91cc-3fee512f8acb req-1acb52f7-562f-4b32-bf32-4bca8199445b service nova] Releasing lock "refresh_cache-20861554-890b-4ad3-a73f-0c825a79bbf1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.309893] env[62383]: DEBUG nova.compute.manager [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1027.310193] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.310473] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c960089-097a-4b0d-90be-aff535873f06 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.318036] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1027.318036] env[62383]: value = "task-2452213" [ 1027.318036] env[62383]: _type = "Task" [ 1027.318036] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.326721] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452213, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.395634] env[62383]: DEBUG oslo_vmware.api [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23506} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.395904] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.397055] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1027.397055] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1027.397055] env[62383]: INFO nova.compute.manager [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1027.397055] env[62383]: DEBUG oslo.service.loopingcall [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.397055] env[62383]: DEBUG nova.compute.manager [-] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1027.397055] env[62383]: DEBUG nova.network.neutron [-] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1027.452745] env[62383]: DEBUG oslo_vmware.api [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Task: {'id': task-2452212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248739} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.455138] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.455305] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1027.455483] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1027.455655] env[62383]: INFO nova.compute.manager [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1027.455894] env[62383]: DEBUG oslo.service.loopingcall [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.456129] env[62383]: DEBUG nova.compute.manager [-] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1027.456271] env[62383]: DEBUG nova.network.neutron [-] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1027.546888] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]529ecf89-55ed-b270-1b04-2f6bbf40e3f4, 'name': SearchDatastore_Task, 'duration_secs': 0.010362} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.547939] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6f42ace-d15b-4a5a-b39c-5339968aa0b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.553629] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1027.553629] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528e6d27-8a8c-d026-ef63-16d67f8c5718" [ 1027.553629] env[62383]: _type = "Task" [ 1027.553629] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.558205] env[62383]: DEBUG nova.objects.instance [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lazy-loading 'numa_topology' on Instance uuid c56464dd-63af-4686-b666-d0ac2df01ec1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.560427] env[62383]: DEBUG nova.compute.utils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1027.561896] env[62383]: DEBUG nova.compute.manager [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1027.562190] env[62383]: DEBUG nova.network.neutron [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1027.569453] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528e6d27-8a8c-d026-ef63-16d67f8c5718, 'name': SearchDatastore_Task, 'duration_secs': 0.01095} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.572310] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.572688] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 20861554-890b-4ad3-a73f-0c825a79bbf1/20861554-890b-4ad3-a73f-0c825a79bbf1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1027.573398] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98832d29-95cf-45e5-ba9c-9caee4b898ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.584218] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1027.584218] env[62383]: value = "task-2452214" [ 1027.584218] env[62383]: _type = "Task" [ 1027.584218] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.593057] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452214, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.605025] env[62383]: DEBUG nova.policy [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7dd6d207b93b431895b5bb3c749cddc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '439cb1d6c59f4407921de2276ff4f0a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1027.830915] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452213, 'name': PowerOffVM_Task, 'duration_secs': 0.226065} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.831262] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1027.831492] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1027.831691] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496542', 'volume_id': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'name': 'volume-a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'eedc7859-3882-4837-9419-f9edce5f12fa', 'attached_at': '2025-02-11T15:32:25.000000', 'detached_at': '', 'volume_id': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'serial': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1027.832716] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7032c06d-24e8-469d-bb86-311c08ead803 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.865884] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2408f645-49f6-418b-ba0b-e0e01acc98d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.873147] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff1529f-1a4b-4f7a-a915-99a6914a7fba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.902327] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ef33c9-7d59-4955-8124-3c122313af62 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.921115] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] The volume has not been displaced from its original location: [datastore1] volume-a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b/volume-a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1027.926690] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfiguring VM instance instance-00000048 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1027.927146] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-433d9cb2-373f-40ec-ac58-572400a37e44 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.945859] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1027.945859] env[62383]: value = "task-2452215" [ 1027.945859] env[62383]: _type = "Task" [ 1027.945859] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.959036] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452215, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.976151] env[62383]: DEBUG nova.compute.manager [req-bc1157ad-d23c-4b6e-8265-a034d92f36c8 req-673d8e93-f277-4ef4-9fd7-e1370c1c8f97 service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Received event network-vif-deleted-5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1027.976314] env[62383]: INFO nova.compute.manager [req-bc1157ad-d23c-4b6e-8265-a034d92f36c8 req-673d8e93-f277-4ef4-9fd7-e1370c1c8f97 service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Neutron deleted interface 5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf; detaching it from the instance and deleting it from the info cache [ 1027.976497] env[62383]: DEBUG nova.network.neutron [req-bc1157ad-d23c-4b6e-8265-a034d92f36c8 req-673d8e93-f277-4ef4-9fd7-e1370c1c8f97 service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.980382] env[62383]: DEBUG nova.network.neutron [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Successfully created port: 59f7a7fd-f1a7-4f6e-a37b-bc4baf716887 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1028.062806] env[62383]: INFO nova.compute.claims [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.065703] env[62383]: DEBUG nova.compute.manager [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 
tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1028.093215] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452214, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.452432] env[62383]: DEBUG nova.network.neutron [-] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.459922] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452215, 'name': ReconfigVM_Task, 'duration_secs': 0.374436} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.459922] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Reconfigured VM instance instance-00000048 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1028.464475] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b1a2dc7-057b-49d5-96da-e47ee1a81f67 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.480577] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1028.480577] env[62383]: value = "task-2452216" [ 1028.480577] env[62383]: _type = "Task" [ 1028.480577] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.485058] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5a08f52-b078-4f1f-af52-cced4fb4c1ab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.492406] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452216, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.496372] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fa95f5-7838-4b0d-a787-15342693255f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.528857] env[62383]: DEBUG nova.compute.manager [req-bc1157ad-d23c-4b6e-8265-a034d92f36c8 req-673d8e93-f277-4ef4-9fd7-e1370c1c8f97 service nova] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Detach interface failed, port_id=5c1a084e-a9a1-42de-b4c7-8dc9dcac78bf, reason: Instance 2c93bdf1-aaf4-4e40-898a-634dc00d05e6 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1028.565793] env[62383]: DEBUG nova.network.neutron [-] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.595937] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452214, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529042} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.595937] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 20861554-890b-4ad3-a73f-0c825a79bbf1/20861554-890b-4ad3-a73f-0c825a79bbf1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1028.595937] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1028.595937] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4052c869-8285-4c77-9fad-6cf870ccf4d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.602864] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1028.602864] env[62383]: value = "task-2452217" [ 1028.602864] env[62383]: _type = "Task" [ 1028.602864] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.613288] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452217, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.956672] env[62383]: INFO nova.compute.manager [-] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Took 1.50 seconds to deallocate network for instance. [ 1028.991313] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452216, 'name': ReconfigVM_Task, 'duration_secs': 0.31325} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.991694] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496542', 'volume_id': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'name': 'volume-a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'eedc7859-3882-4837-9419-f9edce5f12fa', 'attached_at': '2025-02-11T15:32:25.000000', 'detached_at': '', 'volume_id': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b', 'serial': 'a8f42582-77d9-4eb1-ad31-7f0f6d22ee0b'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1028.992011] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1028.992761] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5206ff78-55c5-4cf1-b146-c8cd661eb654 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.999213] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1028.999655] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5b8946c-2b5a-4868-9d75-2695ab6765f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.060170] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1029.060398] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1029.060583] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleting the datastore file [datastore2] eedc7859-3882-4837-9419-f9edce5f12fa {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1029.060835] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ffa9c5fd-1df0-4dff-8406-bdace4fa02e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.066860] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1029.066860] env[62383]: value = "task-2452219" [ 1029.066860] env[62383]: _type = "Task" [ 1029.066860] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.069995] env[62383]: INFO nova.compute.manager [-] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Took 1.67 seconds to deallocate network for instance. [ 1029.080444] env[62383]: DEBUG nova.compute.manager [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1029.082073] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452219, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.109614] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1029.109742] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1029.109856] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1029.110017] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1029.110228] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1029.110378] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1029.111292] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1029.111292] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1029.111292] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1029.111292] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1029.111292] env[62383]: DEBUG nova.virt.hardware [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1029.112592] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b54add5-65d0-4cd1-ab4d-0b069458e50d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.122050] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452217, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.175729} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.124059] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1029.124828] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f83715-e38c-4120-b1c2-82c214e5d6d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.128047] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18825ed6-7fc6-4237-a3b8-ae65d0d65584 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.162872] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 20861554-890b-4ad3-a73f-0c825a79bbf1/20861554-890b-4ad3-a73f-0c825a79bbf1.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1029.163625] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61cb16c1-9984-4579-9daf-811d37fa6392 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.184671] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 
tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1029.184671] env[62383]: value = "task-2452220" [ 1029.184671] env[62383]: _type = "Task" [ 1029.184671] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.193045] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452220, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.321896] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855d3489-dc43-4819-8203-130576a1e2b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.331382] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78590263-08db-4d10-9798-f24857585a4f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.360613] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ab195c-2f56-4674-b7f4-024421077ce3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.367991] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8386bcbd-b2b5-4004-89df-52b84c6ed753 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.373697] env[62383]: DEBUG nova.compute.manager [req-34ba60e0-3f26-4a1b-9425-4e0b8c0d2d30 req-16fe4d5a-3fb5-448c-b4f3-3339f358bc42 service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Received event network-vif-plugged-59f7a7fd-f1a7-4f6e-a37b-bc4baf716887 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1029.374041] env[62383]: DEBUG oslo_concurrency.lockutils [req-34ba60e0-3f26-4a1b-9425-4e0b8c0d2d30 req-16fe4d5a-3fb5-448c-b4f3-3339f358bc42 service nova] Acquiring lock "8f639983-e7ef-4a63-94b6-5c5256015937-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.374148] env[62383]: DEBUG oslo_concurrency.lockutils [req-34ba60e0-3f26-4a1b-9425-4e0b8c0d2d30 req-16fe4d5a-3fb5-448c-b4f3-3339f358bc42 service nova] Lock "8f639983-e7ef-4a63-94b6-5c5256015937-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.374333] env[62383]: DEBUG oslo_concurrency.lockutils [req-34ba60e0-3f26-4a1b-9425-4e0b8c0d2d30 req-16fe4d5a-3fb5-448c-b4f3-3339f358bc42 service nova] Lock "8f639983-e7ef-4a63-94b6-5c5256015937-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.374850] env[62383]: DEBUG nova.compute.manager [req-34ba60e0-3f26-4a1b-9425-4e0b8c0d2d30 req-16fe4d5a-3fb5-448c-b4f3-3339f358bc42 
service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] No waiting events found dispatching network-vif-plugged-59f7a7fd-f1a7-4f6e-a37b-bc4baf716887 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1029.374850] env[62383]: WARNING nova.compute.manager [req-34ba60e0-3f26-4a1b-9425-4e0b8c0d2d30 req-16fe4d5a-3fb5-448c-b4f3-3339f358bc42 service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Received unexpected event network-vif-plugged-59f7a7fd-f1a7-4f6e-a37b-bc4baf716887 for instance with vm_state building and task_state spawning. [ 1029.384490] env[62383]: DEBUG nova.compute.provider_tree [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.463519] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.552732] env[62383]: DEBUG nova.network.neutron [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Successfully updated port: 59f7a7fd-f1a7-4f6e-a37b-bc4baf716887 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1029.578042] env[62383]: DEBUG oslo_vmware.api [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1616} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.579111] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.579111] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1029.579111] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1029.579111] env[62383]: INFO nova.compute.manager [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Took 2.27 seconds to destroy the instance on the hypervisor. 
[ 1029.579342] env[62383]: DEBUG oslo.service.loopingcall [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1029.579681] env[62383]: DEBUG nova.compute.manager [-] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1029.579681] env[62383]: DEBUG nova.network.neutron [-] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1029.583401] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.696118] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452220, 'name': ReconfigVM_Task, 'duration_secs': 0.288501} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.696390] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 20861554-890b-4ad3-a73f-0c825a79bbf1/20861554-890b-4ad3-a73f-0c825a79bbf1.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1029.697042] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d85d35b-f369-40d9-bba3-e251c33e6b3c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.703705] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1029.703705] env[62383]: value = "task-2452221" [ 1029.703705] env[62383]: _type = "Task" [ 1029.703705] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.711642] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452221, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.887743] env[62383]: DEBUG nova.scheduler.client.report [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.009464] env[62383]: DEBUG nova.compute.manager [req-8894989c-79e5-473f-9b98-a895c7f2ca92 req-e2fa35a3-4321-42c3-9234-27169c0daaa7 service nova] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Received event network-vif-deleted-d2ef4e8d-94b1-42c1-aa93-d5d46d04b5dd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1030.055769] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "refresh_cache-8f639983-e7ef-4a63-94b6-5c5256015937" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.055923] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired lock "refresh_cache-8f639983-e7ef-4a63-94b6-5c5256015937" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.056059] env[62383]: DEBUG nova.network.neutron [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1030.214177] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452221, 'name': Rename_Task, 'duration_secs': 0.149477} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.214177] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1030.214177] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58bcf9d0-4cf5-408d-a976-69fb853672e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.220481] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1030.220481] env[62383]: value = "task-2452222" [ 1030.220481] env[62383]: _type = "Task" [ 1030.220481] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.228213] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452222, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.394118] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.340s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.396360] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.348s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.397856] env[62383]: INFO nova.compute.claims [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1030.426309] env[62383]: INFO nova.network.neutron [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updating port 0afca8d2-b019-4a25-af28-7061dbf32e28 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1030.495173] env[62383]: INFO nova.compute.manager [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Rebuilding instance [ 1030.545309] env[62383]: DEBUG nova.compute.manager [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 
tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1030.546179] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3fd8fe-de82-47bc-91dc-0088957a718b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.596897] env[62383]: DEBUG nova.network.neutron [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1030.652371] env[62383]: DEBUG nova.network.neutron [-] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.730533] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452222, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.734053] env[62383]: DEBUG nova.network.neutron [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Updating instance_info_cache with network_info: [{"id": "59f7a7fd-f1a7-4f6e-a37b-bc4baf716887", "address": "fa:16:3e:ba:d4:a1", "network": {"id": "81a92028-949b-47ea-a5fa-c0dbfd5e5571", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1764143122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "439cb1d6c59f4407921de2276ff4f0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59f7a7fd-f1", "ovs_interfaceid": "59f7a7fd-f1a7-4f6e-a37b-bc4baf716887", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.154853] env[62383]: INFO nova.compute.manager [-] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Took 1.58 seconds to deallocate network for instance. [ 1031.230450] env[62383]: DEBUG oslo_vmware.api [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452222, 'name': PowerOnVM_Task, 'duration_secs': 0.995523} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.230822] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1031.231117] env[62383]: INFO nova.compute.manager [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Took 7.75 seconds to spawn the instance on the hypervisor. [ 1031.231425] env[62383]: DEBUG nova.compute.manager [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1031.232791] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b9da91-b647-47bf-9210-cdb761ff0e61 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.236080] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Releasing lock "refresh_cache-8f639983-e7ef-4a63-94b6-5c5256015937" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.236377] env[62383]: DEBUG nova.compute.manager [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Instance network_info: |[{"id": "59f7a7fd-f1a7-4f6e-a37b-bc4baf716887", "address": "fa:16:3e:ba:d4:a1", "network": {"id": "81a92028-949b-47ea-a5fa-c0dbfd5e5571", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1764143122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "439cb1d6c59f4407921de2276ff4f0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59f7a7fd-f1", "ovs_interfaceid": "59f7a7fd-f1a7-4f6e-a37b-bc4baf716887", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1031.236740] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 
8f639983-e7ef-4a63-94b6-5c5256015937] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:d4:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59f7a7fd-f1a7-4f6e-a37b-bc4baf716887', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1031.244717] env[62383]: DEBUG oslo.service.loopingcall [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1031.245746] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1031.248629] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5949b25e-51d5-4899-bb28-43ff025990ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.268914] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1031.268914] env[62383]: value = "task-2452223" [ 1031.268914] env[62383]: _type = "Task" [ 1031.268914] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.276439] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452223, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.402334] env[62383]: DEBUG nova.compute.manager [req-169635c3-a889-4c04-972b-5698b5a2778a req-cc61f2f0-e99e-49e5-9186-fdbeab764c69 service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Received event network-changed-59f7a7fd-f1a7-4f6e-a37b-bc4baf716887 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1031.402549] env[62383]: DEBUG nova.compute.manager [req-169635c3-a889-4c04-972b-5698b5a2778a req-cc61f2f0-e99e-49e5-9186-fdbeab764c69 service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Refreshing instance network info cache due to event network-changed-59f7a7fd-f1a7-4f6e-a37b-bc4baf716887. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1031.402768] env[62383]: DEBUG oslo_concurrency.lockutils [req-169635c3-a889-4c04-972b-5698b5a2778a req-cc61f2f0-e99e-49e5-9186-fdbeab764c69 service nova] Acquiring lock "refresh_cache-8f639983-e7ef-4a63-94b6-5c5256015937" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.402915] env[62383]: DEBUG oslo_concurrency.lockutils [req-169635c3-a889-4c04-972b-5698b5a2778a req-cc61f2f0-e99e-49e5-9186-fdbeab764c69 service nova] Acquired lock "refresh_cache-8f639983-e7ef-4a63-94b6-5c5256015937" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.403093] env[62383]: DEBUG nova.network.neutron [req-169635c3-a889-4c04-972b-5698b5a2778a req-cc61f2f0-e99e-49e5-9186-fdbeab764c69 service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Refreshing network info cache for port 59f7a7fd-f1a7-4f6e-a37b-bc4baf716887 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1031.558935] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.559328] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-161fd687-679d-4f22-a072-ab6dadffa6fb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.565854] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1031.565854] env[62383]: value = "task-2452224" [ 1031.565854] env[62383]: _type = "Task" [ 1031.565854] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.573591] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452224, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.619492] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518cdc4e-6604-4c21-8158-13b59d1131c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.627378] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085d2cb3-4607-4065-8a45-66f61869c0b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.657275] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a34857-ddb1-4841-9163-470e1956968d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.665076] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a789f10e-f636-42d3-9ebb-bcb004e311b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.681363] env[62383]: DEBUG nova.compute.provider_tree [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.702820] env[62383]: INFO nova.compute.manager [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Took 0.55 seconds to detach 1 volumes for instance. [ 1031.774422] env[62383]: INFO nova.compute.manager [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Took 18.53 seconds to build instance. [ 1031.781575] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452223, 'name': CreateVM_Task, 'duration_secs': 0.312865} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.781741] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1031.782441] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.782608] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.782938] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1031.783226] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b0eda54-e3fc-4483-af9b-5c7bc3387587 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.788533] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1031.788533] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522689fa-a7b4-aeae-8073-558980d1f10a" [ 1031.788533] env[62383]: _type = "Task" [ 1031.788533] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.796915] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522689fa-a7b4-aeae-8073-558980d1f10a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.940726] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.941046] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.941046] env[62383]: DEBUG nova.network.neutron [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.077501] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452224, 'name': PowerOffVM_Task, 'duration_secs': 0.174147} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.077771] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1032.077996] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1032.078759] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0484bae3-c1f2-4b8d-91a5-9a2421b7633d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.085183] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1032.085415] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-004e1a66-19b6-4fe9-bc15-7098156c6991 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.102637] env[62383]: DEBUG nova.network.neutron [req-169635c3-a889-4c04-972b-5698b5a2778a req-cc61f2f0-e99e-49e5-9186-fdbeab764c69 service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Updated VIF entry in instance network info cache for port 59f7a7fd-f1a7-4f6e-a37b-bc4baf716887. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1032.102986] env[62383]: DEBUG nova.network.neutron [req-169635c3-a889-4c04-972b-5698b5a2778a req-cc61f2f0-e99e-49e5-9186-fdbeab764c69 service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Updating instance_info_cache with network_info: [{"id": "59f7a7fd-f1a7-4f6e-a37b-bc4baf716887", "address": "fa:16:3e:ba:d4:a1", "network": {"id": "81a92028-949b-47ea-a5fa-c0dbfd5e5571", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1764143122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "439cb1d6c59f4407921de2276ff4f0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59f7a7fd-f1", "ovs_interfaceid": "59f7a7fd-f1a7-4f6e-a37b-bc4baf716887", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.143874] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1032.144116] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1032.144303] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleting the datastore file [datastore1] 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1032.144564] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c47e2ca1-142c-46aa-a890-2d483a7bcc6b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.150906] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1032.150906] env[62383]: value = "task-2452226" [ 1032.150906] env[62383]: _type = "Task" [ 1032.150906] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.158468] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452226, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.184928] env[62383]: DEBUG nova.scheduler.client.report [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.209907] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.276694] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0402ab6f-c321-4375-9364-41b457270e50 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "20861554-890b-4ad3-a73f-0c825a79bbf1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.045s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.298899] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522689fa-a7b4-aeae-8073-558980d1f10a, 'name': SearchDatastore_Task, 'duration_secs': 0.014589} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.299319] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.299545] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1032.299817] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.300024] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.300224] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1032.300511] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85033814-f492-43b5-a90c-1df1f2b730f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.318540] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1032.318755] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1032.319683] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd7f852d-ecfd-46e2-98a5-73e142f5bf58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.324983] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1032.324983] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522f5435-7067-c9c9-6c75-585240792d2a" [ 1032.324983] env[62383]: _type = "Task" [ 1032.324983] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.333839] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522f5435-7067-c9c9-6c75-585240792d2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.605958] env[62383]: DEBUG oslo_concurrency.lockutils [req-169635c3-a889-4c04-972b-5698b5a2778a req-cc61f2f0-e99e-49e5-9186-fdbeab764c69 service nova] Releasing lock "refresh_cache-8f639983-e7ef-4a63-94b6-5c5256015937" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.606246] env[62383]: DEBUG nova.compute.manager [req-169635c3-a889-4c04-972b-5698b5a2778a req-cc61f2f0-e99e-49e5-9186-fdbeab764c69 service nova] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Received event network-vif-deleted-6ba23e44-2c77-442d-9aee-5a75d8abab68 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1032.632985] env[62383]: DEBUG nova.network.neutron [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updating instance_info_cache with network_info: [{"id": "0afca8d2-b019-4a25-af28-7061dbf32e28", "address": "fa:16:3e:e8:5a:70", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0afca8d2-b0", "ovs_interfaceid": "0afca8d2-b019-4a25-af28-7061dbf32e28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.661971] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452226, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20959} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.662243] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1032.662430] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1032.662601] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1032.690762] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.691311] env[62383]: DEBUG nova.compute.manager [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1032.693823] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.931s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.694059] env[62383]: DEBUG nova.objects.instance [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'resources' on Instance uuid 4cd9c7be-c5f4-460b-a9e2-e8f778076947 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.837285] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522f5435-7067-c9c9-6c75-585240792d2a, 'name': SearchDatastore_Task, 'duration_secs': 0.008942} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.838100] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c339570-318f-467c-aa3e-81f4118c8c9c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.843589] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1032.843589] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520c0429-d9ae-73e6-c9ff-286b1f7e14fd" [ 1032.843589] env[62383]: _type = "Task" [ 1032.843589] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.851366] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520c0429-d9ae-73e6-c9ff-286b1f7e14fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.015104] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.015374] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.136241] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.162625] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='1894f407a8ad5ed23cc2ca4edb621768',container_format='bare',created_at=2025-02-11T15:31:49Z,direct_url=,disk_format='vmdk',id=83286fd2-a028-4799-8ed9-fae62546d213,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1029383248-shelved',owner='b9f2dba3783e48968554ca75be01cd5c',properties=ImageMetaProps,protected=,size=31590912,status='active',tags=,updated_at=2025-02-11T15:32:06Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1033.162872] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.163039] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.163227] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.163377] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.163525] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1033.163730] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1033.163887] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1033.164064] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1033.164231] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 1033.164403] env[62383]: DEBUG nova.virt.hardware [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1033.165266] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a63f27-63f5-4065-954a-2859e37dc1b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.176309] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6f7dc1-ded5-462e-a743-86105cdd530b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.189711] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:5a:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b36c5ae6-c344-4bd1-8239-29128e2bbfbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0afca8d2-b019-4a25-af28-7061dbf32e28', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.197857] env[62383]: DEBUG oslo.service.loopingcall [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.199360] env[62383]: DEBUG nova.compute.utils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1033.203685] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.204495] env[62383]: DEBUG nova.compute.manager [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1033.204809] env[62383]: DEBUG nova.network.neutron [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1033.206485] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3dc559e-ee1f-42a3-80fc-1f417c91c572 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.232861] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.232861] env[62383]: value = "task-2452227" [ 1033.232861] env[62383]: _type = "Task" [ 1033.232861] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.242259] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452227, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.264236] env[62383]: DEBUG nova.policy [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7dd6d207b93b431895b5bb3c749cddc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '439cb1d6c59f4407921de2276ff4f0a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1033.354277] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520c0429-d9ae-73e6-c9ff-286b1f7e14fd, 'name': SearchDatastore_Task, 'duration_secs': 0.013567} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.354586] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.354927] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8f639983-e7ef-4a63-94b6-5c5256015937/8f639983-e7ef-4a63-94b6-5c5256015937.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1033.355067] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-637ca28e-4882-46ff-a305-d36e1d64f47c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.365023] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1033.365023] env[62383]: value = "task-2452228" [ 1033.365023] env[62383]: _type = "Task" [ 1033.365023] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.376014] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452228, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.419988] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd5d203-6b10-4165-af35-89d65ed37cb5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.428492] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59aa6dd-32a3-418c-8d21-f625a6c97329 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.466137] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65cc1e4-b01f-4160-ad31-6f41bc080f90 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.470111] env[62383]: DEBUG nova.compute.manager [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Received event network-vif-plugged-0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1033.470376] env[62383]: DEBUG oslo_concurrency.lockutils [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] Acquiring lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.470600] env[62383]: DEBUG oslo_concurrency.lockutils [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.470808] env[62383]: DEBUG oslo_concurrency.lockutils [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.470998] env[62383]: DEBUG nova.compute.manager [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] No waiting events found dispatching network-vif-plugged-0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1033.471235] env[62383]: WARNING nova.compute.manager [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Received unexpected event network-vif-plugged-0afca8d2-b019-4a25-af28-7061dbf32e28 for instance with vm_state shelved_offloaded and task_state spawning. 
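The recurring lock lines in this trace ("Acquiring lock ... by ...", "acquired ... :: waited Ns", ""released" ... :: held Ns") are emitted by oslo.concurrency's lockutils wrapper, which times how long a caller waited for a named lock and how long it held it. The following is a minimal sketch of that pattern only; the lock names and the decorated function are illustrative placeholders, not the actual Nova ResourceTracker or cache-refresh code.

    # Sketch: named-lock usage that produces the waited/held debug lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage_example(instance_uuid):
        # Only one thread in this worker enters at a time; the wrapper logs
        # "acquired ... :: waited Ns" on entry and "released ... :: held Ns" on exit.
        return instance_uuid

    # Equivalent context-manager form for an ad-hoc critical section
    # ('refresh_cache-example' is a made-up lock name for illustration):
    with lockutils.lock('refresh_cache-example'):
        pass  # guarded work, e.g. rebuilding an instance's network info cache

By default these are in-process semaphores keyed by the lock name; passing external=True would typically switch to a file-based lock shared across processes, which is not what the entries above show.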
[ 1033.471418] env[62383]: DEBUG nova.compute.manager [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Received event network-changed-0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1033.471607] env[62383]: DEBUG nova.compute.manager [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Refreshing instance network info cache due to event network-changed-0afca8d2-b019-4a25-af28-7061dbf32e28. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1033.471830] env[62383]: DEBUG oslo_concurrency.lockutils [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] Acquiring lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.471979] env[62383]: DEBUG oslo_concurrency.lockutils [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] Acquired lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.472186] env[62383]: DEBUG nova.network.neutron [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Refreshing network info cache for port 0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1033.480192] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62db4d1-ef47-41e8-a483-9e787d857711 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.496619] env[62383]: DEBUG nova.compute.provider_tree [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.518650] env[62383]: DEBUG nova.compute.utils [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1033.536920] env[62383]: DEBUG nova.network.neutron [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Successfully created port: b55c61d2-6116-422c-9da3-9f29d3174451 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1033.699051] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1033.699481] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.699542] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.699691] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.699844] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.699995] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1033.700703] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1033.700703] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1033.700703] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1033.700967] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 
tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1033.700994] env[62383]: DEBUG nova.virt.hardware [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1033.702044] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d79d71f-7b6b-4865-aef4-176dbd9dffd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.706168] env[62383]: DEBUG nova.compute.manager [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1033.716153] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a508d90-de75-4d43-bf80-c4a34911e90e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.730874] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:7e:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '63e45f61-1d9b-4660-8d25-89fb68d45cd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61bdafb3-8c09-454a-af63-5aaacc52947b', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.738710] env[62383]: DEBUG oslo.service.loopingcall [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.739063] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.742470] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f886bde-24eb-465a-8615-7d8ba90af20d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.765630] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452227, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.766118] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.766118] env[62383]: value = "task-2452229" [ 1033.766118] env[62383]: _type = "Task" [ 1033.766118] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.775151] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452229, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.877156] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452228, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501982} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.877419] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8f639983-e7ef-4a63-94b6-5c5256015937/8f639983-e7ef-4a63-94b6-5c5256015937.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1033.877634] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1033.877892] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12285f6c-1c5d-40fb-ba2d-7652f3587ccf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.884083] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1033.884083] env[62383]: value = "task-2452230" [ 1033.884083] env[62383]: _type = "Task" [ 1033.884083] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.893540] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452230, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.002235] env[62383]: DEBUG nova.scheduler.client.report [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.022331] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.216543] env[62383]: DEBUG nova.network.neutron [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updated VIF entry in instance network info cache for port 0afca8d2-b019-4a25-af28-7061dbf32e28. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1034.216543] env[62383]: DEBUG nova.network.neutron [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updating instance_info_cache with network_info: [{"id": "0afca8d2-b019-4a25-af28-7061dbf32e28", "address": "fa:16:3e:e8:5a:70", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0afca8d2-b0", "ovs_interfaceid": "0afca8d2-b019-4a25-af28-7061dbf32e28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.249101] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452227, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.275298] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452229, 'name': CreateVM_Task, 'duration_secs': 0.368067} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.275477] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1034.276162] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.276363] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.276679] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1034.277013] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7fd08fb-666e-4251-bf32-57112a98dbf2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.281556] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1034.281556] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5233fbe7-1366-ec8f-097d-44b9252330e5" [ 1034.281556] env[62383]: _type = "Task" [ 1034.281556] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.289255] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5233fbe7-1366-ec8f-097d-44b9252330e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.393764] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452230, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070102} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.394138] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1034.394940] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb6ee79-1f9c-4bfc-830b-11f74d02f9db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.416647] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 8f639983-e7ef-4a63-94b6-5c5256015937/8f639983-e7ef-4a63-94b6-5c5256015937.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1034.416938] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f71d7569-d4bc-436f-8b49-bf1e8ada1ba8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.438594] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1034.438594] env[62383]: value = "task-2452231" [ 1034.438594] env[62383]: _type = "Task" [ 1034.438594] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.447174] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452231, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.509516] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.815s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.512691] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.569s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.513291] env[62383]: DEBUG nova.objects.instance [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Lazy-loading 'resources' on Instance uuid 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.535775] env[62383]: INFO nova.scheduler.client.report [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleted allocations for instance 4cd9c7be-c5f4-460b-a9e2-e8f778076947 [ 1034.715084] env[62383]: DEBUG nova.compute.manager [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1034.718978] env[62383]: DEBUG oslo_concurrency.lockutils [req-2dc13f9b-de18-480f-b48a-2b04ae1b50f9 req-beefaa9d-b2a1-4a1e-bba9-30658ad7ef60 service nova] Releasing lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.746598] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1034.746841] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1034.746997] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1034.747265] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1034.747426] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1034.747574] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1034.747776] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1034.747932] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1034.748109] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1034.748274] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1034.748444] env[62383]: DEBUG nova.virt.hardware [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1034.749517] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f1d29d-4869-4bf4-8c13-093dc3a173ec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.759533] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1681e35c-5f05-4c69-9320-133d914e8c71 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.763244] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452227, 'name': CreateVM_Task, 'duration_secs': 1.325671} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.763401] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1034.764295] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.764455] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.764800] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1034.765031] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e28670e-1715-42f9-9166-7dcae766fba1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.777490] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1034.777490] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52367c7e-9c98-6f6d-5af5-9cb387ba4da4" [ 1034.777490] env[62383]: _type = "Task" [ 1034.777490] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.784307] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52367c7e-9c98-6f6d-5af5-9cb387ba4da4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.792062] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5233fbe7-1366-ec8f-097d-44b9252330e5, 'name': SearchDatastore_Task, 'duration_secs': 0.009125} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.792062] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.792285] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.792506] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.792650] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.792823] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.793054] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03598c86-cb26-4783-abec-aff18b2f7c70 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.801446] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.801630] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.802313] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca11736d-75fa-406e-aebd-3f7744d69238 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.806716] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1034.806716] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52750a7e-b55b-d7f0-0926-9faede721ddc" [ 1034.806716] env[62383]: _type = "Task" [ 1034.806716] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.814391] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52750a7e-b55b-d7f0-0926-9faede721ddc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.940735] env[62383]: DEBUG nova.compute.manager [req-b3975e25-8882-4319-a6e5-23d047a3ae41 req-ff151904-8339-4e3d-bb86-b3b9ed6c9333 service nova] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Received event network-vif-plugged-b55c61d2-6116-422c-9da3-9f29d3174451 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1034.941034] env[62383]: DEBUG oslo_concurrency.lockutils [req-b3975e25-8882-4319-a6e5-23d047a3ae41 req-ff151904-8339-4e3d-bb86-b3b9ed6c9333 service nova] Acquiring lock "690dca62-cafb-40f7-92f0-9bbfde3467b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.941187] env[62383]: DEBUG oslo_concurrency.lockutils [req-b3975e25-8882-4319-a6e5-23d047a3ae41 req-ff151904-8339-4e3d-bb86-b3b9ed6c9333 service nova] Lock "690dca62-cafb-40f7-92f0-9bbfde3467b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.941376] env[62383]: DEBUG oslo_concurrency.lockutils [req-b3975e25-8882-4319-a6e5-23d047a3ae41 req-ff151904-8339-4e3d-bb86-b3b9ed6c9333 service nova] Lock "690dca62-cafb-40f7-92f0-9bbfde3467b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.941604] env[62383]: DEBUG nova.compute.manager [req-b3975e25-8882-4319-a6e5-23d047a3ae41 req-ff151904-8339-4e3d-bb86-b3b9ed6c9333 service nova] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] No waiting events found dispatching network-vif-plugged-b55c61d2-6116-422c-9da3-9f29d3174451 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.941722] env[62383]: WARNING nova.compute.manager [req-b3975e25-8882-4319-a6e5-23d047a3ae41 req-ff151904-8339-4e3d-bb86-b3b9ed6c9333 service nova] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Received unexpected event 
network-vif-plugged-b55c61d2-6116-422c-9da3-9f29d3174451 for instance with vm_state building and task_state spawning. [ 1034.951262] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452231, 'name': ReconfigVM_Task, 'duration_secs': 0.34753} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.951529] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 8f639983-e7ef-4a63-94b6-5c5256015937/8f639983-e7ef-4a63-94b6-5c5256015937.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1034.952143] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87e4a01c-63a6-4c38-baff-6c12f1b465ef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.958154] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1034.958154] env[62383]: value = "task-2452232" [ 1034.958154] env[62383]: _type = "Task" [ 1034.958154] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.966026] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452232, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.046954] env[62383]: DEBUG nova.network.neutron [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Successfully updated port: b55c61d2-6116-422c-9da3-9f29d3174451 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1035.048444] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f54093f5-3ec4-409c-a5c3-1ecdc720280c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "4cd9c7be-c5f4-460b-a9e2-e8f778076947" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.141s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.107283] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.107650] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.107910] env[62383]: INFO nova.compute.manager [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Attaching volume 656a8db8-6cf0-47a8-a1b4-ea050e96ecc1 to /dev/sdb [ 1035.143556] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2c993a-f86b-4b12-b0bd-6b97e5979ef4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.152609] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a896e620-1b1b-4684-ae67-f5517f260665 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.165732] env[62383]: DEBUG nova.virt.block_device [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Updating existing volume attachment record: e595d970-d9f1-4221-a1b2-ac656eaf2761 {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1035.235867] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a88c67-3059-4b50-bbba-4a0b78bd1aa4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.242904] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-584f2bb4-7d1c-47d9-96e6-1f503306736b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.275109] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d79aa3f-537f-4c7b-b980-a661f0cb983a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.284279] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71bfb773-c3e5-4244-8328-0a9d079e2ff1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.291894] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.292157] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Processing image 83286fd2-a028-4799-8ed9-fae62546d213 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.292385] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213/83286fd2-a028-4799-8ed9-fae62546d213.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.292529] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "[datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213/83286fd2-a028-4799-8ed9-fae62546d213.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.292730] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.292919] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-052bc9c9-cca4-4d54-b006-86a100845db7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.302200] env[62383]: DEBUG nova.compute.provider_tree [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.316595] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f 
tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.316770] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.317737] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52750a7e-b55b-d7f0-0926-9faede721ddc, 'name': SearchDatastore_Task, 'duration_secs': 0.070148} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.317928] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb7e90a6-74db-4261-bda6-beb634ac456d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.320894] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba937b94-0d00-4300-8479-a9775944ac5c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.325699] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1035.325699] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5275a6f8-6e70-7dfc-5997-345a81d9a335" [ 1035.325699] env[62383]: _type = "Task" [ 1035.325699] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.326804] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1035.326804] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52bc12f3-404e-b2f3-de6c-6b8bae464953" [ 1035.326804] env[62383]: _type = "Task" [ 1035.326804] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.336450] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5275a6f8-6e70-7dfc-5997-345a81d9a335, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.339120] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bc12f3-404e-b2f3-de6c-6b8bae464953, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.469621] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452232, 'name': Rename_Task, 'duration_secs': 0.143885} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.469993] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1035.470282] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ee19001-4c11-471e-9040-8c759d03e586 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.476382] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1035.476382] env[62383]: value = "task-2452234" [ 1035.476382] env[62383]: _type = "Task" [ 1035.476382] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.484609] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452234, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.550536] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "refresh_cache-690dca62-cafb-40f7-92f0-9bbfde3467b6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.550700] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired lock "refresh_cache-690dca62-cafb-40f7-92f0-9bbfde3467b6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.550860] env[62383]: DEBUG nova.network.neutron [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.805433] env[62383]: DEBUG nova.scheduler.client.report [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1035.841326] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bc12f3-404e-b2f3-de6c-6b8bae464953, 'name': SearchDatastore_Task, 'duration_secs': 0.048362} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.845041] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.845546] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7/3e868358-2aa2-4ddd-9c2e-16eb5c194bb7.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1035.846027] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ad04072-22a8-44c6-8b07-2be1d0f4a5b0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.850039] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Preparing fetch location {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1035.850321] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Fetch image to [datastore2] OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25/OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25.vmdk {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1035.850569] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Downloading stream optimized image 83286fd2-a028-4799-8ed9-fae62546d213 to [datastore2] OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25/OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25.vmdk on the data store datastore2 as vApp {{(pid=62383) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1035.850768] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Downloading image file data 83286fd2-a028-4799-8ed9-fae62546d213 to the ESX as VM named 'OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25' {{(pid=62383) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1035.858187] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1035.858187] env[62383]: value = "task-2452237" [ 1035.858187] env[62383]: _type = "Task" [ 
1035.858187] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.865830] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452237, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.928387] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1035.928387] env[62383]: value = "resgroup-9" [ 1035.928387] env[62383]: _type = "ResourcePool" [ 1035.928387] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1035.928675] env[62383]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-5d25812d-31a0-4fb0-98c2-33d21ba2ef53 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.949436] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lease: (returnval){ [ 1035.949436] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a321b3-7ff3-b7c6-293c-c1a98445c9cd" [ 1035.949436] env[62383]: _type = "HttpNfcLease" [ 1035.949436] env[62383]: } obtained for vApp import into resource pool (val){ [ 1035.949436] env[62383]: value = "resgroup-9" [ 1035.949436] env[62383]: _type = "ResourcePool" [ 1035.949436] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1035.949755] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the lease: (returnval){ [ 1035.949755] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a321b3-7ff3-b7c6-293c-c1a98445c9cd" [ 1035.949755] env[62383]: _type = "HttpNfcLease" [ 1035.949755] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1035.956796] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1035.956796] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a321b3-7ff3-b7c6-293c-c1a98445c9cd" [ 1035.956796] env[62383]: _type = "HttpNfcLease" [ 1035.956796] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1035.986310] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452234, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.102789] env[62383]: DEBUG nova.network.neutron [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1036.310469] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.798s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.313195] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.571s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.313489] env[62383]: DEBUG nova.objects.instance [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lazy-loading 'resources' on Instance uuid 4d929f43-cea2-41a0-9822-180a2647be2c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1036.343863] env[62383]: DEBUG nova.network.neutron [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Updating instance_info_cache with network_info: [{"id": "b55c61d2-6116-422c-9da3-9f29d3174451", "address": "fa:16:3e:06:e4:ca", "network": {"id": "81a92028-949b-47ea-a5fa-c0dbfd5e5571", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1764143122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "439cb1d6c59f4407921de2276ff4f0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb55c61d2-61", "ovs_interfaceid": "b55c61d2-6116-422c-9da3-9f29d3174451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.355110] env[62383]: INFO nova.scheduler.client.report [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Deleted allocations for instance 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270 [ 
1036.371719] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452237, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.458308] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1036.458308] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a321b3-7ff3-b7c6-293c-c1a98445c9cd" [ 1036.458308] env[62383]: _type = "HttpNfcLease" [ 1036.458308] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1036.487835] env[62383]: DEBUG oslo_vmware.api [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452234, 'name': PowerOnVM_Task, 'duration_secs': 0.522037} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.488120] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1036.488355] env[62383]: INFO nova.compute.manager [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Took 7.41 seconds to spawn the instance on the hypervisor. 
[ 1036.488496] env[62383]: DEBUG nova.compute.manager [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1036.489285] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dde7324-4e53-4173-ab75-1e32e8aff05a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.847587] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Releasing lock "refresh_cache-690dca62-cafb-40f7-92f0-9bbfde3467b6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.850666] env[62383]: DEBUG nova.compute.manager [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Instance network_info: |[{"id": "b55c61d2-6116-422c-9da3-9f29d3174451", "address": "fa:16:3e:06:e4:ca", "network": {"id": "81a92028-949b-47ea-a5fa-c0dbfd5e5571", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1764143122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "439cb1d6c59f4407921de2276ff4f0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb55c61d2-61", "ovs_interfaceid": "b55c61d2-6116-422c-9da3-9f29d3174451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1036.850666] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:e4:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b55c61d2-6116-422c-9da3-9f29d3174451', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1036.856215] env[62383]: DEBUG oslo.service.loopingcall [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.859790] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1036.860218] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6cf3d573-197f-4a65-bfa2-8682084f9da5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.883214] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c35fa012-0a04-4be2-9053-c567603c88f8 tempest-ServerMetadataTestJSON-1220543904 tempest-ServerMetadataTestJSON-1220543904-project-member] Lock "8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.366s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.889745] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452237, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521647} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.899619] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7/3e868358-2aa2-4ddd-9c2e-16eb5c194bb7.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1036.899619] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1036.899619] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1036.899619] env[62383]: value = "task-2452239" [ 1036.899619] env[62383]: _type = "Task" [ 1036.899619] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.899619] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cacdc3df-96f2-43be-b4fe-4b0b79ee6efb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.904054] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452239, 'name': CreateVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.906284] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1036.906284] env[62383]: value = "task-2452240" [ 1036.906284] env[62383]: _type = "Task" [ 1036.906284] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.959994] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1036.959994] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a321b3-7ff3-b7c6-293c-c1a98445c9cd" [ 1036.959994] env[62383]: _type = "HttpNfcLease" [ 1036.959994] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1036.975798] env[62383]: DEBUG nova.compute.manager [req-7598a21d-f98e-430d-88fc-86ce743c3bfe req-1186c114-4610-40fb-85a3-4e219ad0bf66 service nova] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Received event network-changed-b55c61d2-6116-422c-9da3-9f29d3174451 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1036.975798] env[62383]: DEBUG nova.compute.manager [req-7598a21d-f98e-430d-88fc-86ce743c3bfe req-1186c114-4610-40fb-85a3-4e219ad0bf66 service nova] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Refreshing instance network info cache due to event network-changed-b55c61d2-6116-422c-9da3-9f29d3174451. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1036.976086] env[62383]: DEBUG oslo_concurrency.lockutils [req-7598a21d-f98e-430d-88fc-86ce743c3bfe req-1186c114-4610-40fb-85a3-4e219ad0bf66 service nova] Acquiring lock "refresh_cache-690dca62-cafb-40f7-92f0-9bbfde3467b6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.977102] env[62383]: DEBUG oslo_concurrency.lockutils [req-7598a21d-f98e-430d-88fc-86ce743c3bfe req-1186c114-4610-40fb-85a3-4e219ad0bf66 service nova] Acquired lock "refresh_cache-690dca62-cafb-40f7-92f0-9bbfde3467b6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.977332] env[62383]: DEBUG nova.network.neutron [req-7598a21d-f98e-430d-88fc-86ce743c3bfe req-1186c114-4610-40fb-85a3-4e219ad0bf66 service nova] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Refreshing network info cache for port b55c61d2-6116-422c-9da3-9f29d3174451 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1037.010366] env[62383]: INFO nova.compute.manager [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Took 23.35 seconds to build instance. 
[ 1037.061007] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24929f4e-fccd-482e-a76b-f8c3929518d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.068892] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e21560-47ad-48a9-a333-e5d629e105d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.103018] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41b8222-ffcf-44e9-98d1-99376795d88d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.110487] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e16e305-194e-4dd6-b392-0ed4771d396d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.124170] env[62383]: DEBUG nova.compute.provider_tree [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1037.407537] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452239, 'name': CreateVM_Task, 'duration_secs': 0.513502} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.407710] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1037.408516] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.408576] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.408911] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1037.409233] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ccd0f8e-29d7-4263-ba3b-d2859041234a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.418627] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1037.418627] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e89a86-2963-25b1-f8bd-2e3490059419" [ 1037.418627] env[62383]: _type = "Task" [ 1037.418627] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.421555] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452240, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09164} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.425027] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.425644] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83242198-61e8-4e7e-ad96-f5a96c92ca79 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.432553] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e89a86-2963-25b1-f8bd-2e3490059419, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.450709] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7/3e868358-2aa2-4ddd-9c2e-16eb5c194bb7.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.450993] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcad2027-714c-4049-b003-ccc8f2d2cdae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.475034] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.475259] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.478514] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1037.478514] env[62383]: value = "task-2452241" [ 1037.478514] env[62383]: _type = "Task" [ 1037.478514] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.480754] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1037.480754] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a321b3-7ff3-b7c6-293c-c1a98445c9cd" [ 1037.480754] env[62383]: _type = "HttpNfcLease" [ 1037.480754] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1037.486222] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1037.486222] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a321b3-7ff3-b7c6-293c-c1a98445c9cd" [ 1037.486222] env[62383]: _type = "HttpNfcLease" [ 1037.486222] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1037.491069] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20264315-8469-4092-8aeb-174472b2bbe6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.499908] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452241, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.503260] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7f83c-3065-24df-4917-dff0a2b8de1c/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1037.503464] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Creating HTTP connection to write to file with size = 31590912 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7f83c-3065-24df-4917-dff0a2b8de1c/disk-0.vmdk. 
{{(pid=62383) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1037.562216] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ba127aec-273c-4e5e-a8b1-b1e979baebfd tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "8f639983-e7ef-4a63-94b6-5c5256015937" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.910s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.570873] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-528375c4-2bae-466b-9869-e9530d99bd54 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.652157] env[62383]: ERROR nova.scheduler.client.report [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [req-92177921-7725-4a39-a7b8-6d032668ba31] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-92177921-7725-4a39-a7b8-6d032668ba31"}]} [ 1037.673508] env[62383]: DEBUG nova.scheduler.client.report [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1037.692376] env[62383]: DEBUG nova.scheduler.client.report [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1037.692729] env[62383]: DEBUG nova.compute.provider_tree [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1037.710598] env[62383]: DEBUG nova.scheduler.client.report [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1037.737694] env[62383]: DEBUG nova.scheduler.client.report [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1037.817370] env[62383]: DEBUG nova.network.neutron [req-7598a21d-f98e-430d-88fc-86ce743c3bfe req-1186c114-4610-40fb-85a3-4e219ad0bf66 service nova] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Updated VIF entry in instance network info cache for port b55c61d2-6116-422c-9da3-9f29d3174451. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1037.817776] env[62383]: DEBUG nova.network.neutron [req-7598a21d-f98e-430d-88fc-86ce743c3bfe req-1186c114-4610-40fb-85a3-4e219ad0bf66 service nova] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Updating instance_info_cache with network_info: [{"id": "b55c61d2-6116-422c-9da3-9f29d3174451", "address": "fa:16:3e:06:e4:ca", "network": {"id": "81a92028-949b-47ea-a5fa-c0dbfd5e5571", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1764143122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "439cb1d6c59f4407921de2276ff4f0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb55c61d2-61", "ovs_interfaceid": "b55c61d2-6116-422c-9da3-9f29d3174451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.938580] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e89a86-2963-25b1-f8bd-2e3490059419, 'name': SearchDatastore_Task, 'duration_secs': 0.029098} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.941037] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.941527] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1037.941602] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.941801] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.941996] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.945042] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8a04ec4-9f0f-4bb0-adf5-77ef61902e38 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.959358] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.959358] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1037.961570] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0db1e2b5-022f-45a5-ad65-a4d8094f251e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.969975] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1037.969975] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d180de-4826-152a-4340-3a62dabd641b" [ 1037.969975] env[62383]: _type = "Task" [ 1037.969975] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.978280] env[62383]: DEBUG nova.compute.manager [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1037.987983] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d180de-4826-152a-4340-3a62dabd641b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.996939] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452241, 'name': ReconfigVM_Task, 'duration_secs': 0.340263} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.000086] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7/3e868358-2aa2-4ddd-9c2e-16eb5c194bb7.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.002702] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e692bd51-fc33-42b9-9888-6750f7a7a244 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.012636] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1038.012636] env[62383]: value = "task-2452243" [ 1038.012636] env[62383]: _type = "Task" [ 1038.012636] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.019726] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e013de13-2c98-498a-9880-ee50cbdf88c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.025194] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452243, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.031720] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc4115c-f0d1-4b9c-9a2c-4df72cbb8c3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.072825] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd293ad7-a83c-4c9b-b0ed-54b8bc210785 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.080711] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5377e049-8e0c-42ca-ab08-ed3b739e6757 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.095027] env[62383]: DEBUG nova.compute.provider_tree [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1038.326657] env[62383]: DEBUG oslo_concurrency.lockutils [req-7598a21d-f98e-430d-88fc-86ce743c3bfe req-1186c114-4610-40fb-85a3-4e219ad0bf66 service nova] Releasing lock "refresh_cache-690dca62-cafb-40f7-92f0-9bbfde3467b6" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.486876] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d180de-4826-152a-4340-3a62dabd641b, 'name': SearchDatastore_Task, 'duration_secs': 0.012585} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.492088] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60350848-2f99-4bdc-98f2-27264c401bf3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.500306] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1038.500306] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52dc160f-3b45-9bb1-77ab-ce9a37753a61" [ 1038.500306] env[62383]: _type = "Task" [ 1038.500306] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.511805] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52dc160f-3b45-9bb1-77ab-ce9a37753a61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.513027] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.525368] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452243, 'name': Rename_Task, 'duration_secs': 0.181772} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.525708] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.526059] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9bf98de-f080-4d75-a627-28bd9cbee112 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.534515] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1038.534515] env[62383]: value = "task-2452244" [ 1038.534515] env[62383]: _type = "Task" [ 1038.534515] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.546306] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452244, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.634738] env[62383]: DEBUG nova.scheduler.client.report [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 143 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1038.635209] env[62383]: DEBUG nova.compute.provider_tree [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 143 to 144 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1038.635478] env[62383]: DEBUG nova.compute.provider_tree [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1038.814330] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Completed reading data from the image iterator. {{(pid=62383) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1038.814604] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7f83c-3065-24df-4917-dff0a2b8de1c/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1038.815581] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2642c0a7-cad4-4fb4-939b-b22cf63fd747 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.823362] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7f83c-3065-24df-4917-dff0a2b8de1c/disk-0.vmdk is in state: ready. 
{{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1038.823616] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7f83c-3065-24df-4917-dff0a2b8de1c/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1038.823912] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-4eece1d8-4819-4b3b-8d38-11b9adebc19c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.011239] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52dc160f-3b45-9bb1-77ab-ce9a37753a61, 'name': SearchDatastore_Task, 'duration_secs': 0.013519} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.011627] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.011908] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 690dca62-cafb-40f7-92f0-9bbfde3467b6/690dca62-cafb-40f7-92f0-9bbfde3467b6.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1039.012196] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5436579-6432-467a-88de-e90048f24ec9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.018738] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1039.018738] env[62383]: value = "task-2452245" [ 1039.018738] env[62383]: _type = "Task" [ 1039.018738] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.028818] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452245, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.043347] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452244, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.099648] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7f83c-3065-24df-4917-dff0a2b8de1c/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1039.099990] env[62383]: INFO nova.virt.vmwareapi.images [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Downloaded image file data 83286fd2-a028-4799-8ed9-fae62546d213 [ 1039.101135] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4460be-d4fd-442c-85bb-63779e3f1a7d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.121719] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ccf189dd-acbe-485e-af4c-7dddef5c26d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.142942] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.830s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.145869] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.376s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.146108] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.146341] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1039.146760] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.683s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.147378] env[62383]: DEBUG nova.objects.instance [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lazy-loading 'resources' on Instance uuid 2c93bdf1-aaf4-4e40-898a-634dc00d05e6 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.149797] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6245e68e-718f-4cb6-908c-653a831c069c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.166432] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d37a7d-148e-466a-8fba-036bd4199c87 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.171883] env[62383]: INFO nova.scheduler.client.report [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted allocations for instance 4d929f43-cea2-41a0-9822-180a2647be2c [ 1039.174565] env[62383]: INFO nova.virt.vmwareapi.images [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] The imported VM was unregistered [ 1039.176937] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Caching image {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1039.177231] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Creating directory with path [datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.191333] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f944085-7711-4aeb-b57b-42f818407ca0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.194770] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e710fbce-8a25-4d85-aa9d-a7e71453c957 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.202862] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54465daa-5e16-48ab-9e0b-826699afc5a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.211503] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Created directory with path [datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.211503] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25/OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25.vmdk to [datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213/83286fd2-a028-4799-8ed9-fae62546d213.vmdk. {{(pid=62383) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1039.236218] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-4aebeb50-e00c-48a6-aa63-42cf24dd00ef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.239283] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179089MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1039.239283] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.247416] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1039.247416] env[62383]: value = "task-2452247" [ 1039.247416] env[62383]: _type = "Task" [ 1039.247416] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.259092] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452247, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.402516] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158c81ba-4dbe-4977-ae94-c544a4e3b686 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.412189] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6ba74d-9847-40ab-bf79-ab44d2948af8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.444722] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fd6367-83ae-40f2-85d7-921cb2770658 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.457411] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1f5d77-e908-4712-8861-24c73e0d6ddb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.472173] env[62383]: DEBUG nova.compute.provider_tree [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.530575] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452245, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.546532] env[62383]: DEBUG oslo_vmware.api [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452244, 'name': PowerOnVM_Task, 'duration_secs': 0.637131} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.546926] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.547379] env[62383]: DEBUG nova.compute.manager [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1039.548412] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244e4d98-5113-4982-8f4c-8baa0c8547ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.697437] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9fd1992f-a828-4f9f-89fe-a1ff815b7034 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "4d929f43-cea2-41a0-9822-180a2647be2c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.042s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.758048] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452247, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.977132] env[62383]: DEBUG nova.scheduler.client.report [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1040.030757] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452245, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.692386} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.031426] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 690dca62-cafb-40f7-92f0-9bbfde3467b6/690dca62-cafb-40f7-92f0-9bbfde3467b6.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1040.031779] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1040.032350] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f605ea93-666d-4d87-b2c4-8a954e966624 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.041235] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1040.041235] env[62383]: value = "task-2452248" [ 1040.041235] env[62383]: _type = "Task" [ 1040.041235] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.050501] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452248, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.070641] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.217389] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Volume attach. 
Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1040.217638] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496576', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'name': 'volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9f8e346e-815c-492d-84a9-00ebdca3bcc3', 'attached_at': '', 'detached_at': '', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'serial': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1040.218596] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64de6622-51c8-45ed-9617-49791590899a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.235968] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c1a97a-3e82-4df3-8c03-1141264cc97b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.262252] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1/volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.265585] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5441a30a-3b76-419a-8516-7553feba5c39 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.287699] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452247, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.289569] env[62383]: DEBUG oslo_vmware.api [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1040.289569] env[62383]: value = "task-2452249" [ 1040.289569] env[62383]: _type = "Task" [ 1040.289569] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.300278] env[62383]: DEBUG oslo_vmware.api [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452249, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.482987] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.336s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.485210] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.902s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.485454] env[62383]: DEBUG nova.objects.instance [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lazy-loading 'resources' on Instance uuid 67f05a2b-f323-4e4a-ac13-7f4745593be0 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.513731] env[62383]: INFO nova.scheduler.client.report [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Deleted allocations for instance 2c93bdf1-aaf4-4e40-898a-634dc00d05e6 [ 1040.551908] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452248, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.206402} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.552213] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1040.553023] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9c512f-c115-470b-bae2-9543fe235843 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.576419] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 690dca62-cafb-40f7-92f0-9bbfde3467b6/690dca62-cafb-40f7-92f0-9bbfde3467b6.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.576787] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b38ac3f0-35b1-4e4e-bb68-d92357f7fd76 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.600497] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1040.600497] env[62383]: value = "task-2452250" [ 1040.600497] env[62383]: _type = "Task" [ 1040.600497] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.612092] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452250, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.764042] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452247, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.800381] env[62383]: DEBUG oslo_vmware.api [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452249, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.024349] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ac5b20e2-be36-4df7-8c6c-10dcf2fa4a63 tempest-ServersTestJSON-715409334 tempest-ServersTestJSON-715409334-project-member] Lock "2c93bdf1-aaf4-4e40-898a-634dc00d05e6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.183s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.117340] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452250, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.264443] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452247, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.278970] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432bff92-9158-4d70-acf9-13e0e0e01f05 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.287305] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3448c311-7115-4bc9-af73-1fa612498ebd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.298857] env[62383]: DEBUG oslo_vmware.api [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452249, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.325751] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e12e78-b024-4e42-8bbc-3a626a94eca1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.334232] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5290d8-d3b0-4422-91da-437e47d95010 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.352382] env[62383]: DEBUG nova.compute.provider_tree [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.615651] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452250, 'name': ReconfigVM_Task, 'duration_secs': 0.852717} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.616085] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 690dca62-cafb-40f7-92f0-9bbfde3467b6/690dca62-cafb-40f7-92f0-9bbfde3467b6.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.616990] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26cc337c-97b4-4890-9eb2-43f4d313cc89 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.624854] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1041.624854] env[62383]: value = "task-2452251" [ 1041.624854] env[62383]: _type = "Task" [ 1041.624854] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.637663] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452251, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.765080] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452247, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.800851] env[62383]: DEBUG oslo_vmware.api [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452249, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.806795] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "981aa014-4861-4ab3-94e3-c113eec9bf29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.807047] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.855876] env[62383]: DEBUG nova.scheduler.client.report [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1042.136344] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452251, 'name': Rename_Task, 'duration_secs': 0.345585} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.137126] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1042.137579] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e31f86c-b195-4fcb-ab60-8d72ea62f8c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.144344] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1042.144344] env[62383]: value = "task-2452252" [ 1042.144344] env[62383]: _type = "Task" [ 1042.144344] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.154537] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452252, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.269556] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452247, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.303689] env[62383]: DEBUG oslo_vmware.api [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452249, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.309316] env[62383]: DEBUG nova.compute.manager [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1042.363558] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.878s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.366015] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.156s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.368451] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.368451] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.855s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.372023] env[62383]: INFO nova.compute.claims [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Claim successful 
on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.405016] env[62383]: INFO nova.scheduler.client.report [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleted allocations for instance eedc7859-3882-4837-9419-f9edce5f12fa [ 1042.405883] env[62383]: INFO nova.scheduler.client.report [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleted allocations for instance 67f05a2b-f323-4e4a-ac13-7f4745593be0 [ 1042.655641] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452252, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.771343] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452247, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.042366} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.771660] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25/OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25.vmdk to [datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213/83286fd2-a028-4799-8ed9-fae62546d213.vmdk. [ 1042.771903] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Cleaning up location [datastore2] OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1042.772135] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_1549aef3-2078-4cc1-901d-fe54b471ce25 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1042.772429] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-337664f7-99c0-49ac-b3c9-4a547746a22f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.779074] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1042.779074] env[62383]: value = "task-2452253" [ 1042.779074] env[62383]: _type = "Task" [ 1042.779074] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.787301] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.804110] env[62383]: DEBUG oslo_vmware.api [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452249, 'name': ReconfigVM_Task, 'duration_secs': 2.133926} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.804441] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfigured VM instance instance-0000005c to attach disk [datastore2] volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1/volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1042.809243] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdab3ef9-7927-40dd-9e4d-5e96db1334a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.827444] env[62383]: DEBUG oslo_vmware.api [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1042.827444] env[62383]: value = "task-2452254" [ 1042.827444] env[62383]: _type = "Task" [ 1042.827444] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.839115] env[62383]: DEBUG oslo_vmware.api [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452254, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.840247] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.916641] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5fd69d6d-cae4-4389-9144-de1605a96d87 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "eedc7859-3882-4837-9419-f9edce5f12fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.115s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.918978] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f894a3c5-fa02-4493-a9db-041e32011710 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "67f05a2b-f323-4e4a-ac13-7f4745593be0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.153s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.155535] env[62383]: DEBUG oslo_vmware.api [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452252, 'name': PowerOnVM_Task, 'duration_secs': 0.549033} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.155884] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1043.156032] env[62383]: INFO nova.compute.manager [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Took 8.44 seconds to spawn the instance on the hypervisor. [ 1043.156218] env[62383]: DEBUG nova.compute.manager [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1043.156978] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67223df0-ee76-4afa-ba00-28b1d4899afd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.288558] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17591} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.288808] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1043.288977] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "[datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213/83286fd2-a028-4799-8ed9-fae62546d213.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.289241] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213/83286fd2-a028-4799-8ed9-fae62546d213.vmdk to [datastore2] c56464dd-63af-4686-b666-d0ac2df01ec1/c56464dd-63af-4686-b666-d0ac2df01ec1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1043.289494] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6adef083-c07c-423d-a35c-c4205159807c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.296419] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1043.296419] env[62383]: value = "task-2452255" [ 1043.296419] env[62383]: _type = "Task" [ 1043.296419] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.303689] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452255, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.337427] env[62383]: DEBUG oslo_vmware.api [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452254, 'name': ReconfigVM_Task, 'duration_secs': 0.278548} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.337749] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496576', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'name': 'volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9f8e346e-815c-492d-84a9-00ebdca3bcc3', 'attached_at': '', 'detached_at': '', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'serial': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1043.542974] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5594cb-8898-40e4-b760-985a58b12bca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.551657] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ed2afc-9932-4495-93ba-1a5b2fd3308a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.585106] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a240f4-73a1-42e2-815b-226e233a4351 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.592967] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71fd3f3-bdeb-4091-8831-5ecd28e87583 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.606989] env[62383]: DEBUG nova.compute.provider_tree [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.673704] env[62383]: INFO nova.compute.manager [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Took 29.64 seconds to build instance. [ 1043.815521] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452255, 'name': CopyVirtualDisk_Task} progress is 12%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.933732] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "0392d059-57ea-49fb-84d2-b71cbca840db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.933985] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "0392d059-57ea-49fb-84d2-b71cbca840db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.112026] env[62383]: DEBUG nova.scheduler.client.report [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1044.176031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-75a1c80b-5c83-4a27-99a6-33d583c4e9ee tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "690dca62-cafb-40f7-92f0-9bbfde3467b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.153s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.310383] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452255, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.389217] env[62383]: DEBUG nova.objects.instance [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lazy-loading 'flavor' on Instance uuid 9f8e346e-815c-492d-84a9-00ebdca3bcc3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1044.436803] env[62383]: DEBUG nova.compute.manager [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1044.617590] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.618309] env[62383]: DEBUG nova.compute.manager [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1044.621578] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.383s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.807853] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452255, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.893496] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e2eb7d48-ecee-48ae-87e3-54a06ab205ec tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.786s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.907353] env[62383]: INFO nova.compute.manager [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Rebuilding instance [ 1044.958964] env[62383]: DEBUG nova.compute.manager [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1044.959925] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc584a7-f6fb-444c-8515-23b84bcbeb3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.966854] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.127289] env[62383]: DEBUG nova.compute.utils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 
tempest-AttachInterfacesTestJSON-112066379-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1045.138939] env[62383]: DEBUG nova.compute.manager [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1045.139224] env[62383]: DEBUG nova.network.neutron [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1045.197392] env[62383]: DEBUG nova.policy [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7825487398fc47b5aa690bed357e4448', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba559fb5da01474791c2408ca92bbff6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1045.309287] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452255, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.530845] env[62383]: DEBUG nova.network.neutron [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Successfully created port: 2a9eb423-4347-4116-825d-0afad0e10ad1 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.642873] env[62383]: DEBUG nova.compute.manager [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1045.679183] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8bd05dac-7aa2-44c5-8752-6045c01d213d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.679382] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2f028680-8db4-474a-8f24-880c4702877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.679520] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1b025655-acad-4b70-9e1a-489683cafb7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.679683] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 9f8e346e-815c-492d-84a9-00ebdca3bcc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.679787] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.679910] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance ec7c648d-10b0-480a-a5f0-4dab08d0049e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.680038] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance c56464dd-63af-4686-b666-d0ac2df01ec1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.682738] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 20861554-890b-4ad3-a73f-0c825a79bbf1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.682738] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8f639983-e7ef-4a63-94b6-5c5256015937 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.682738] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 690dca62-cafb-40f7-92f0-9bbfde3467b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.682738] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1045.730138] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "20861554-890b-4ad3-a73f-0c825a79bbf1" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.730454] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "20861554-890b-4ad3-a73f-0c825a79bbf1" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.730712] env[62383]: DEBUG nova.compute.manager [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1045.739023] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6b7fa9-3e8c-4907-b85c-e563200eb219 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.751691] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "de24aca8-30fc-453e-b192-b6bb115876ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.751691] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.756023] env[62383]: DEBUG nova.compute.manager [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1045.756023] env[62383]: DEBUG nova.objects.instance [None 
req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lazy-loading 'flavor' on Instance uuid 20861554-890b-4ad3-a73f-0c825a79bbf1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.811974] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452255, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.978187] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.978516] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e029da3-9d7e-4a35-815e-448c16e824aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.985405] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1045.985405] env[62383]: value = "task-2452256" [ 1045.985405] env[62383]: _type = "Task" [ 1045.985405] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.994958] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452256, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.185932] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 981aa014-4861-4ab3-94e3-c113eec9bf29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.251673] env[62383]: DEBUG nova.compute.manager [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1046.313617] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452255, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.643359} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.313944] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/83286fd2-a028-4799-8ed9-fae62546d213/83286fd2-a028-4799-8ed9-fae62546d213.vmdk to [datastore2] c56464dd-63af-4686-b666-d0ac2df01ec1/c56464dd-63af-4686-b666-d0ac2df01ec1.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1046.314852] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662ad7e8-3b76-48a8-ae49-04e31004094b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.339609] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] c56464dd-63af-4686-b666-d0ac2df01ec1/c56464dd-63af-4686-b666-d0ac2df01ec1.vmdk or device None with type streamOptimized {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.339979] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-795cebbd-67e5-49aa-85ee-6f640c4f11c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.361145] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1046.361145] env[62383]: value = "task-2452257" [ 1046.361145] env[62383]: _type = "Task" [ 1046.361145] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.369655] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452257, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.497646] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452256, 'name': PowerOffVM_Task, 'duration_secs': 0.269129} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.498573] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1046.553527] env[62383]: INFO nova.compute.manager [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Detaching volume 656a8db8-6cf0-47a8-a1b4-ea050e96ecc1 [ 1046.587990] env[62383]: INFO nova.virt.block_device [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Attempting to driver detach volume 656a8db8-6cf0-47a8-a1b4-ea050e96ecc1 from mountpoint /dev/sdb [ 1046.588286] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1046.588498] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496576', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'name': 'volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9f8e346e-815c-492d-84a9-00ebdca3bcc3', 'attached_at': '', 'detached_at': '', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'serial': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1046.589440] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e7dbc4-f7af-4316-a75f-cd7f0a51ac05 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.612285] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd837a4b-e583-45e1-854c-680e33a2686b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.619867] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457864be-5020-4c27-a7fa-46d9e6552d89 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.643161] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8f80c6-3231-4dd0-93f9-50bd9cdc560b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.659685] env[62383]: DEBUG nova.compute.manager [None 
req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1046.661771] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] The volume has not been displaced from its original location: [datastore2] volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1/volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1.vmdk. No consolidation needed. {{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1046.667033] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfiguring VM instance instance-0000005c to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1046.667550] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6b4a5c5-7217-430c-9da0-f7a8b0681775 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.686558] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1046.686558] env[62383]: value = "task-2452258" [ 1046.686558] env[62383]: _type = "Task" [ 1046.686558] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.691904] env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1046.692209] env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1046.692376] env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1046.692563] env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1046.692725] env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1046.692879] env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1046.693098] env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1046.693260] env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1046.693427] 
env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1046.693589] env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1046.693762] env[62383]: DEBUG nova.virt.hardware [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1046.694541] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 0392d059-57ea-49fb-84d2-b71cbca840db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.696614] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1368102-daaf-4b21-bf80-7a765a0b814a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.707241] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5923129-31a5-4325-abc8-1dfc0eda9e82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.712363] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452258, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.762794] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1046.763077] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f3ea847-c33b-42e7-a3b1-b94ec0fb12e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.770705] env[62383]: DEBUG oslo_vmware.api [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1046.770705] env[62383]: value = "task-2452259" [ 1046.770705] env[62383]: _type = "Task" [ 1046.770705] env[62383]: } to complete. 
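The nova.virt.hardware records above ("Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") report the enumeration of CPU topologies for the m1.nano flavor. The following is an editor-added, minimal sketch of that factorization idea only; it is not Nova's actual `_get_possible_cpu_topologies()` implementation, and the limits of 65536 per dimension are taken from the log lines above.

```python
# Illustrative sketch only -- not Nova's _get_possible_cpu_topologies().
# Enumerate every (sockets, cores, threads) split whose product equals the
# vCPU count, subject to per-dimension maxima (65536 each in the log above).
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

# For the 1-vCPU flavor in the log this yields the single topology reported:
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(possible_topologies(1))
```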
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.775603] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.784599] env[62383]: DEBUG oslo_vmware.api [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452259, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.871905] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452257, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.200658] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance de24aca8-30fc-453e-b192-b6bb115876ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1047.200812] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1047.200962] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2688MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1047.203437] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452258, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.205842] env[62383]: DEBUG nova.compute.manager [req-778247b4-097b-47f7-a77a-4cbcbd221fe8 req-0baab48f-4356-4072-ab3c-d731ac1ce922 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received event network-vif-plugged-2a9eb423-4347-4116-825d-0afad0e10ad1 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1047.206045] env[62383]: DEBUG oslo_concurrency.lockutils [req-778247b4-097b-47f7-a77a-4cbcbd221fe8 req-0baab48f-4356-4072-ab3c-d731ac1ce922 service nova] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.206114] env[62383]: DEBUG oslo_concurrency.lockutils [req-778247b4-097b-47f7-a77a-4cbcbd221fe8 req-0baab48f-4356-4072-ab3c-d731ac1ce922 service nova] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.209709] env[62383]: DEBUG oslo_concurrency.lockutils [req-778247b4-097b-47f7-a77a-4cbcbd221fe8 req-0baab48f-4356-4072-ab3c-d731ac1ce922 service nova] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.209709] env[62383]: DEBUG nova.compute.manager [req-778247b4-097b-47f7-a77a-4cbcbd221fe8 req-0baab48f-4356-4072-ab3c-d731ac1ce922 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] No waiting events found dispatching network-vif-plugged-2a9eb423-4347-4116-825d-0afad0e10ad1 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1047.209709] env[62383]: WARNING nova.compute.manager [req-778247b4-097b-47f7-a77a-4cbcbd221fe8 req-0baab48f-4356-4072-ab3c-d731ac1ce922 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received unexpected event network-vif-plugged-2a9eb423-4347-4116-825d-0afad0e10ad1 for instance with vm_state building and task_state spawning. [ 1047.284733] env[62383]: DEBUG oslo_vmware.api [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452259, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.301725] env[62383]: DEBUG nova.network.neutron [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Successfully updated port: 2a9eb423-4347-4116-825d-0afad0e10ad1 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1047.379999] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452257, 'name': ReconfigVM_Task} progress is 99%. 
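The lockutils records throughout this section ("Acquiring lock ... by ...", "acquired ... :: waited Ns", "released ... :: held Ns") come from oslo.concurrency's named in-process locks. A minimal, editor-added usage sketch of that pattern follows; it illustrates the library's `synchronized` decorator in general and is not Nova's own locking wrapper code.

```python
# Minimal sketch of the named-lock pattern behind the lockutils DEBUG lines
# ("Acquiring lock ... / acquired ... waited Ns / released ... held Ns").
# Illustrative use of oslo.concurrency, not Nova's own wrapper code.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Everything inside runs with the in-process "compute_resources" lock held,
    # which is why instance_claim and _update_available_resource in the log
    # serialize on each other and report how long they waited and held it.
    print('claiming resources for %s' % instance_uuid)

claim_resources('8d9d6f3b-aef7-478a-a43e-3b621f1b3845')
```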
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.413361] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efcdea5-bbfa-4222-aa3c-2fb86136bb8e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.421304] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c07307-68b0-479c-8f86-950b6a3b5fe5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.452061] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d031bb-ca7f-42cc-a522-766ab8bb9223 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.459490] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320b1a89-a39f-4e5f-9c5d-b54b538b2a73 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.473480] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1047.473744] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.473943] env[62383]: INFO nova.compute.manager [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Shelving [ 1047.475769] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.699219] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452258, 'name': ReconfigVM_Task, 'duration_secs': 0.544806} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.699498] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfigured VM instance instance-0000005c to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1047.704198] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34c685e8-b2ae-4a95-9c8f-a07f01827dd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.718437] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1047.718437] env[62383]: value = "task-2452260" [ 1047.718437] env[62383]: _type = "Task" [ 1047.718437] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.726095] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452260, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.782116] env[62383]: DEBUG oslo_vmware.api [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452259, 'name': PowerOffVM_Task, 'duration_secs': 0.566518} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.782367] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1047.782559] env[62383]: DEBUG nova.compute.manager [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1047.783282] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133006c3-8cdb-4b60-b43f-24729e8dcb59 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.804049] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.804225] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.804377] env[62383]: DEBUG nova.network.neutron [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1047.872519] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452257, 'name': ReconfigVM_Task, 'duration_secs': 1.044059} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.872810] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Reconfigured VM instance instance-0000004f to attach disk [datastore2] c56464dd-63af-4686-b666-d0ac2df01ec1/c56464dd-63af-4686-b666-d0ac2df01ec1.vmdk or device None with type streamOptimized {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.873472] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-030c498b-a4fc-4618-9254-01d4151ddbbb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.879910] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1047.879910] env[62383]: value = "task-2452261" [ 1047.879910] env[62383]: _type = "Task" [ 1047.879910] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.887836] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452261, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.980842] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1048.229424] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452260, 'name': ReconfigVM_Task, 'duration_secs': 0.192499} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.229800] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496576', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'name': 'volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9f8e346e-815c-492d-84a9-00ebdca3bcc3', 'attached_at': '', 'detached_at': '', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'serial': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1048.295982] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ad6331d-58e5-4a23-9d29-c5f985894c6f tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "20861554-890b-4ad3-a73f-0c825a79bbf1" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.565s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.345915] env[62383]: DEBUG nova.network.neutron [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1048.390741] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452261, 'name': Rename_Task} progress is 99%. 
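The inventory report above (VCPU total 48, allocation_ratio 4.0; MEMORY_MB total 196590, reserved 512; DISK_GB total 400) feeds placement's standard capacity formula, capacity = (total - reserved) * allocation_ratio. A short, editor-added worked example using those figures and the usage numbers from the earlier "Final resource view" line:

```python
# Worked example of placement capacity: (total - reserved) * allocation_ratio,
# using the inventory reported above and usage from the "Final resource view".
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
usage = {'VCPU': 11, 'MEMORY_MB': 2688, 'DISK_GB': 11}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print('%s: capacity=%g used=%d free=%g'
          % (rc, capacity, usage[rc], capacity - usage[rc]))
# VCPU capacity is 192 (48 * 4.0), MEMORY_MB is 196078, DISK_GB is 400.
```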
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.487060] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1048.487060] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.865s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.487431] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.487830] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 8.417s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.488036] env[62383]: DEBUG nova.objects.instance [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1048.490710] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-354ca779-dff0-439b-b2eb-869e8d1b5603 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.496510] env[62383]: DEBUG nova.network.neutron [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.498655] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1048.498655] env[62383]: value = "task-2452262" [ 1048.498655] env[62383]: _type = "Task" [ 1048.498655] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.506768] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452262, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.891227] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452261, 'name': Rename_Task, 'duration_secs': 0.522529} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.891500] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.891748] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8f455a5-e7a2-4802-9af4-39f75e0f8926 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.897073] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1048.897073] env[62383]: value = "task-2452263" [ 1048.897073] env[62383]: _type = "Task" [ 1048.897073] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.904555] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452263, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.999525] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.999941] env[62383]: DEBUG nova.compute.manager [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Instance network_info: |[{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1049.000478] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:b3:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a9eb423-4347-4116-825d-0afad0e10ad1', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1049.008104] env[62383]: DEBUG oslo.service.loopingcall [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
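The network_info cache blob above carries everything the driver needs to build the VIF (port UUID, MAC, fixed IPs, switch details). The following editor-added sketch only shows how those fields sit in that structure; the dict literal is trimmed from the log to the keys the sketch actually touches and is not Nova code.

```python
# Illustrative only: pulling the fields the driver needs (port id, MAC,
# fixed IPs) out of a network_info entry shaped like the cache blob above.
vif = {
    "id": "2a9eb423-4347-4116-825d-0afad0e10ad1",
    "address": "fa:16:3e:59:b3:80",
    "network": {
        "subnets": [
            {"cidr": "192.168.128.0/28",
             "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4}]},
        ],
    },
    "type": "ovs",
    "devname": "tap2a9eb423-43",
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(vif["id"], vif["address"], fixed_ips)   # port UUID, MAC, ['192.168.128.11']
```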
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1049.011906] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1049.012150] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5eba331d-09c8-4c39-91c5-151e6d318187 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.033117] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452262, 'name': PowerOffVM_Task, 'duration_secs': 0.453395} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.034280] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.034550] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1049.034550] env[62383]: value = "task-2452264" [ 1049.034550] env[62383]: _type = "Task" [ 1049.034550] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.035242] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8876830d-e44f-4338-b805-a720c3fa4c05 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.058381] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452264, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.059602] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2842a3-9b95-4632-93c9-f1b9a9442ba3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.234267] env[62383]: DEBUG nova.compute.manager [req-51d7f35d-9311-4a14-97ef-ee764cab18cb req-44500a8c-edc1-4f0a-b610-84bfd77da2fe service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received event network-changed-2a9eb423-4347-4116-825d-0afad0e10ad1 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1049.234438] env[62383]: DEBUG nova.compute.manager [req-51d7f35d-9311-4a14-97ef-ee764cab18cb req-44500a8c-edc1-4f0a-b610-84bfd77da2fe service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Refreshing instance network info cache due to event network-changed-2a9eb423-4347-4116-825d-0afad0e10ad1. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1049.234675] env[62383]: DEBUG oslo_concurrency.lockutils [req-51d7f35d-9311-4a14-97ef-ee764cab18cb req-44500a8c-edc1-4f0a-b610-84bfd77da2fe service nova] Acquiring lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.234824] env[62383]: DEBUG oslo_concurrency.lockutils [req-51d7f35d-9311-4a14-97ef-ee764cab18cb req-44500a8c-edc1-4f0a-b610-84bfd77da2fe service nova] Acquired lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.235007] env[62383]: DEBUG nova.network.neutron [req-51d7f35d-9311-4a14-97ef-ee764cab18cb req-44500a8c-edc1-4f0a-b610-84bfd77da2fe service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Refreshing network info cache for port 2a9eb423-4347-4116-825d-0afad0e10ad1 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1049.253488] env[62383]: DEBUG nova.objects.instance [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lazy-loading 'flavor' on Instance uuid 20861554-890b-4ad3-a73f-0c825a79bbf1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.279633] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.279887] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e1f5f3c-c0c4-4d93-a3bf-5c833def0a10 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.289131] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1049.289131] env[62383]: value = "task-2452265" [ 1049.289131] env[62383]: _type = "Task" [ 1049.289131] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.300030] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1049.300387] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Volume detach. 
Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1049.300387] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496576', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'name': 'volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9f8e346e-815c-492d-84a9-00ebdca3bcc3', 'attached_at': '', 'detached_at': '', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'serial': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1049.301148] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbb08fc-34ce-495d-b856-0f64f71b67e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.319217] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36873da2-30e1-4e0f-9f90-86627b9a8e9b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.327962] env[62383]: WARNING nova.virt.vmwareapi.driver [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1049.328198] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1049.329392] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ebc9d6a-e6e0-4c89-8d1b-c15bc6f5b34c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.338490] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1049.338857] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8e5569f-81cb-4370-a824-e55b86679bec {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.411175] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452263, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.413085] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.413639] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.413639] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleting the datastore file [datastore2] 9f8e346e-815c-492d-84a9-00ebdca3bcc3 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.413784] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54fdc971-9b6c-4826-9de5-352784d9a1c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.420406] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1049.420406] env[62383]: value = "task-2452267" [ 1049.420406] env[62383]: _type = "Task" [ 1049.420406] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.428240] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452267, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.499291] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9e59500-8e90-4afd-af4d-7046c403b41f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.500586] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.660s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.502275] env[62383]: INFO nova.compute.claims [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1049.548494] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452264, 'name': CreateVM_Task, 'duration_secs': 0.365559} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.548838] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1049.549565] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.549777] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.550188] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1049.550490] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17ceef3c-9b19-447f-90c4-0db022242e5c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.556234] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1049.556234] env[62383]: value = 
"session[526c6062-9206-ac03-b2da-fd469a7c1551]52b8704f-04fa-be04-1e88-f5812052ec31" [ 1049.556234] env[62383]: _type = "Task" [ 1049.556234] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.565348] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b8704f-04fa-be04-1e88-f5812052ec31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.570097] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1049.570399] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ce17794a-ea51-42f7-8d92-c976076d86ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.577650] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1049.577650] env[62383]: value = "task-2452268" [ 1049.577650] env[62383]: _type = "Task" [ 1049.577650] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.587446] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452268, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.758585] env[62383]: DEBUG oslo_concurrency.lockutils [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "refresh_cache-20861554-890b-4ad3-a73f-0c825a79bbf1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1049.758829] env[62383]: DEBUG oslo_concurrency.lockutils [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquired lock "refresh_cache-20861554-890b-4ad3-a73f-0c825a79bbf1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.758969] env[62383]: DEBUG nova.network.neutron [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1049.759160] env[62383]: DEBUG nova.objects.instance [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lazy-loading 'info_cache' on Instance uuid 20861554-890b-4ad3-a73f-0c825a79bbf1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.907044] env[62383]: DEBUG oslo_vmware.api [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452263, 'name': PowerOnVM_Task, 'duration_secs': 0.727641} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.907311] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.930772] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452267, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176361} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.931073] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.931293] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1049.931493] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1049.945382] env[62383]: DEBUG nova.network.neutron [req-51d7f35d-9311-4a14-97ef-ee764cab18cb req-44500a8c-edc1-4f0a-b610-84bfd77da2fe service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updated VIF entry in instance network info cache for port 2a9eb423-4347-4116-825d-0afad0e10ad1. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1049.945765] env[62383]: DEBUG nova.network.neutron [req-51d7f35d-9311-4a14-97ef-ee764cab18cb req-44500a8c-edc1-4f0a-b610-84bfd77da2fe service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.019095] env[62383]: DEBUG nova.compute.manager [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1050.020286] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4f96e595-2618-4b44-bf07-3b37653d66cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.070579] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b8704f-04fa-be04-1e88-f5812052ec31, 'name': SearchDatastore_Task, 'duration_secs': 0.011163} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.070964] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.071251] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1050.071516] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.071714] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.071901] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1050.072175] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80e56050-1b55-4afc-aeb5-c5c7ef81298c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.084212] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1050.084419] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1050.085803] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1a3d3fa-be17-4a5e-97d4-ae9eae06630c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.091598] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452268, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.094812] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1050.094812] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b19dc5-d05e-523c-ee9d-d2f9f97f161d" [ 1050.094812] env[62383]: _type = "Task" [ 1050.094812] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.102834] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b19dc5-d05e-523c-ee9d-d2f9f97f161d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.268126] env[62383]: DEBUG nova.objects.base [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Object Instance<20861554-890b-4ad3-a73f-0c825a79bbf1> lazy-loaded attributes: flavor,info_cache {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1050.436430] env[62383]: INFO nova.virt.block_device [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Booting with volume 656a8db8-6cf0-47a8-a1b4-ea050e96ecc1 at /dev/sdb [ 1050.449337] env[62383]: DEBUG oslo_concurrency.lockutils [req-51d7f35d-9311-4a14-97ef-ee764cab18cb req-44500a8c-edc1-4f0a-b610-84bfd77da2fe service nova] Releasing lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.473448] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9254ebb4-441c-4330-86c2-a8c15af753f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.483466] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4461bdb1-ebb4-4f8b-b6ee-c996cc9bfedf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.542056] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3debbc4a-32f2-4455-acb9-2ad6c79adcf0 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.548071] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4cc04d7-4bdb-4f69-83b2-6797905ebb0f tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 37.746s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.555951] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee43a46f-aafc-45cc-a396-dfbcd11cd1f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.592984] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452268, 'name': CreateSnapshot_Task, 'duration_secs': 0.776854} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.613392] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1050.615734] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d27194-9276-41e8-96a3-a0d450551f60 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.625125] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852bc632-c90b-4c16-aa60-418a07773d91 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.648837] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa15e1a7-0d58-4e94-a4bb-0207a8466bb2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.658776] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b19dc5-d05e-523c-ee9d-d2f9f97f161d, 'name': SearchDatastore_Task, 'duration_secs': 0.010115} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.664379] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b7b2a03-508a-4c9e-86e7-b665367776c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.671860] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1050.671860] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528a4f38-f6e9-f1ae-2181-625b2393ee1a" [ 1050.671860] env[62383]: _type = "Task" [ 1050.671860] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.677600] env[62383]: DEBUG nova.virt.block_device [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Updating existing volume attachment record: 0d32e10a-0e8d-4123-b8ce-95a48c3bab54 {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1050.692824] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528a4f38-f6e9-f1ae-2181-625b2393ee1a, 'name': SearchDatastore_Task, 'duration_secs': 0.012404} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.694761] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.694761] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8d9d6f3b-aef7-478a-a43e-3b621f1b3845/8d9d6f3b-aef7-478a-a43e-3b621f1b3845.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1050.694761] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d1e0c7e-deb9-44f2-b560-2cd37a51e960 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.701395] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1050.701395] env[62383]: value = "task-2452269" [ 1050.701395] env[62383]: _type = "Task" [ 1050.701395] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.710015] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452269, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.806176] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf2720a-7839-4883-8694-b98a9e5df471 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.813478] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7059dc96-0222-4f2c-8cea-8a5be4b87fa2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.844763] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6e4d55-f2ac-4c24-a028-216a111639d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.854624] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b7e542-3618-41e9-aa77-3e14fe7352c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.869037] env[62383]: DEBUG nova.compute.provider_tree [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.047326] env[62383]: DEBUG nova.network.neutron [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Updating instance_info_cache with network_info: [{"id": "c23a7aba-1afc-4edb-b3ee-d6e718b18392", "address": "fa:16:3e:94:33:9c", "network": {"id": "81a92028-949b-47ea-a5fa-c0dbfd5e5571", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1764143122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "439cb1d6c59f4407921de2276ff4f0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc23a7aba-1a", "ovs_interfaceid": "c23a7aba-1afc-4edb-b3ee-d6e718b18392", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.174742] env[62383]: 
DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1051.174742] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ea588644-8ded-47f2-a285-023f7e42b5c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.183000] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1051.183000] env[62383]: value = "task-2452270" [ 1051.183000] env[62383]: _type = "Task" [ 1051.183000] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.192543] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452270, 'name': CloneVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.212357] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452269, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.374471] env[62383]: DEBUG nova.scheduler.client.report [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1051.550634] env[62383]: DEBUG oslo_concurrency.lockutils [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Releasing lock "refresh_cache-20861554-890b-4ad3-a73f-0c825a79bbf1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.693969] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452270, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.711806] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452269, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533303} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.712099] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8d9d6f3b-aef7-478a-a43e-3b621f1b3845/8d9d6f3b-aef7-478a-a43e-3b621f1b3845.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1051.712323] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1051.712577] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-548b4e28-db81-4397-9fe3-1e6f2e8484b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.718877] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1051.718877] env[62383]: value = "task-2452271" [ 1051.718877] env[62383]: _type = "Task" [ 1051.718877] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.726178] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452271, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.820375] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68716a7-ab86-4c36-8bc6-8c634739fec8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.826899] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f5da61a9-2670-4221-a2ef-30b40d1ac629 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Suspending the VM {{(pid=62383) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1051.827100] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9fcdbc40-82f8-4bc8-b8d9-edef12f055d9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.832655] env[62383]: DEBUG oslo_vmware.api [None req-f5da61a9-2670-4221-a2ef-30b40d1ac629 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1051.832655] env[62383]: value = "task-2452272" [ 1051.832655] env[62383]: _type = "Task" [ 1051.832655] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.840591] env[62383]: DEBUG oslo_vmware.api [None req-f5da61a9-2670-4221-a2ef-30b40d1ac629 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452272, 'name': SuspendVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.880104] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.880649] env[62383]: DEBUG nova.compute.manager [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1051.883350] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.917s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.884841] env[62383]: INFO nova.compute.claims [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1052.194515] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452270, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.229283] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452271, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079091} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.229583] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1052.230436] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63fb628-c4a2-4375-ba64-72c5eaffb20b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.252744] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 8d9d6f3b-aef7-478a-a43e-3b621f1b3845/8d9d6f3b-aef7-478a-a43e-3b621f1b3845.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.253088] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cffc67ae-2ead-409d-b24e-0a7ad28477d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.272773] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1052.272773] env[62383]: value = "task-2452273" [ 1052.272773] env[62383]: _type = "Task" [ 1052.272773] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.282111] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452273, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.343217] env[62383]: DEBUG oslo_vmware.api [None req-f5da61a9-2670-4221-a2ef-30b40d1ac629 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452272, 'name': SuspendVM_Task} progress is 58%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.389192] env[62383]: DEBUG nova.compute.utils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1052.393609] env[62383]: DEBUG nova.compute.manager [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1052.393817] env[62383]: DEBUG nova.network.neutron [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1052.444835] env[62383]: DEBUG nova.policy [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc391aae95a8405bab7801175514ac8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c15955328966463fa09401a270d95fe0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1052.557850] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.558279] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2294830-02be-41dd-b75d-ad5fc278119a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.566028] env[62383]: DEBUG oslo_vmware.api [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1052.566028] env[62383]: value = "task-2452274" [ 1052.566028] env[62383]: _type 
= "Task" [ 1052.566028] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.574991] env[62383]: DEBUG oslo_vmware.api [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452274, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.694684] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452270, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.752018] env[62383]: DEBUG nova.network.neutron [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Successfully created port: 4c1ffea5-d09f-4f98-bbe4-f02d40cad88d {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1052.785822] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452273, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.819345] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1052.819700] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1052.819910] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1052.820131] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] 
Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1052.820378] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1052.820471] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1052.820702] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1052.820895] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1052.821221] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1052.821323] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1052.821540] env[62383]: DEBUG nova.virt.hardware [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1052.822556] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bd813a-ccb7-4ef4-87d3-4b2d6de047f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.834354] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb5a3aa-2528-42f3-ae91-ebff6d8191b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.855284] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:82:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92d4aebb-165f-462e-96ea-53a36bc5eae8', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1052.862764] env[62383]: DEBUG oslo.service.loopingcall [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1052.865914] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1052.866225] env[62383]: DEBUG oslo_vmware.api [None req-f5da61a9-2670-4221-a2ef-30b40d1ac629 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452272, 'name': SuspendVM_Task, 'duration_secs': 0.999888} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.866418] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bac50e89-def2-4f15-8dcd-76588b3f7a79 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.880709] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f5da61a9-2670-4221-a2ef-30b40d1ac629 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Suspended the VM {{(pid=62383) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1052.880933] env[62383]: DEBUG nova.compute.manager [None req-f5da61a9-2670-4221-a2ef-30b40d1ac629 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1052.881828] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b5f694-c619-46ed-a367-8c7df697cdb8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.891899] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1052.891899] env[62383]: value = "task-2452275" [ 1052.891899] env[62383]: _type = "Task" [ 1052.891899] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.895600] env[62383]: DEBUG nova.compute.manager [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1052.906259] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452275, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.077397] env[62383]: DEBUG oslo_vmware.api [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452274, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.105343] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9532c768-2854-4e37-97ce-19367881fea5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.112243] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd363f6b-c6c5-454b-a5bd-be05fcb45f17 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.144772] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c68d34-34e6-4610-92e9-b9af38ad5233 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.153470] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7e2ec6-a5f9-4d0f-b1a7-2f3ddba80454 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.168052] env[62383]: DEBUG nova.compute.provider_tree [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.196567] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452270, 'name': CloneVM_Task, 'duration_secs': 1.912551} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.196846] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Created linked-clone VM from snapshot [ 1053.197599] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28173a2-6ded-41bb-8a34-b38b7cede872 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.204937] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Uploading image 668a5589-791f-4966-838e-a17995d2fb51 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1053.226395] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1053.226395] env[62383]: value = "vm-496581" [ 1053.226395] env[62383]: _type = "VirtualMachine" [ 1053.226395] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1053.226657] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-52571c29-9d40-4e8c-833c-ffaea9a192df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.232877] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lease: (returnval){ [ 1053.232877] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c6f44b-ff62-080f-f40d-ecfff43c0573" [ 1053.232877] env[62383]: _type = "HttpNfcLease" [ 1053.232877] env[62383]: } obtained for exporting VM: (result){ [ 1053.232877] env[62383]: value = "vm-496581" [ 1053.232877] env[62383]: _type = "VirtualMachine" [ 1053.232877] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1053.233120] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the lease: (returnval){ [ 1053.233120] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c6f44b-ff62-080f-f40d-ecfff43c0573" [ 1053.233120] env[62383]: _type = "HttpNfcLease" [ 1053.233120] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1053.239117] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1053.239117] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c6f44b-ff62-080f-f40d-ecfff43c0573" [ 1053.239117] env[62383]: _type = "HttpNfcLease" [ 1053.239117] env[62383]: } is initializing. 
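The linked-clone upload above follows the lease-based export flow: ExportVm is invoked on the cloned VM, the returned HttpNfcLease starts out "initializing", the code waits for it to become ready, and then streams the disk-0.vmdk URL that the lease info advertises (the URL appears a few entries later). A sketch of the read side only, with the lease-state accessor left as a hypothetical callable and the transfer done with plain requests rather than oslo.vmware's rw_handles:

    import time
    import requests

    def read_exported_vmdk(get_lease_state, vmdk_url, chunk_size=1 << 20):
        """Wait for an export lease to become ready, then stream the VMDK.

        get_lease_state is a hypothetical callable returning
        'initializing', 'ready' or 'error'; vmdk_url is the disk-0.vmdk
        URL taken from the lease info, as in the log.
        """
        while (state := get_lease_state()) != 'ready':
            if state == 'error':
                raise RuntimeError('export lease failed')
            time.sleep(1)
        # verify=False mirrors a lab vCenter with an untrusted cert;
        # production code would configure a CA bundle instead.
        with requests.get(vmdk_url, stream=True, verify=False) as resp:
            resp.raise_for_status()
            for chunk in resp.iter_content(chunk_size):
                yield chunk        # caller forwards these bytes, e.g. into Glance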
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1053.282450] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452273, 'name': ReconfigVM_Task, 'duration_secs': 0.59602} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.282660] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 8d9d6f3b-aef7-478a-a43e-3b621f1b3845/8d9d6f3b-aef7-478a-a43e-3b621f1b3845.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.285715] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00d7d725-58e2-4fe3-bb43-daf983bffe86 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.292043] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1053.292043] env[62383]: value = "task-2452277" [ 1053.292043] env[62383]: _type = "Task" [ 1053.292043] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.299650] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452277, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.421101] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452275, 'name': CreateVM_Task, 'duration_secs': 0.375415} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.421101] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1053.421101] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.421101] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.421101] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1053.421101] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4275140b-cffd-41de-9203-16be2a302f0b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.425650] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1053.425650] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b72d48-dd35-8717-f80a-9beac874b618" [ 1053.425650] env[62383]: _type = "Task" [ 1053.425650] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.433885] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b72d48-dd35-8717-f80a-9beac874b618, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.580020] env[62383]: DEBUG oslo_vmware.api [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452274, 'name': PowerOnVM_Task, 'duration_secs': 0.677998} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.580020] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.580020] env[62383]: DEBUG nova.compute.manager [None req-84aeb0a7-cb2d-48c0-8558-fa9dbb3d7062 tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1053.580020] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d0312e-37e8-4e2d-ad1d-202bec4d8b09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.671086] env[62383]: DEBUG nova.scheduler.client.report [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1053.741369] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1053.741369] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c6f44b-ff62-080f-f40d-ecfff43c0573" [ 1053.741369] env[62383]: _type = "HttpNfcLease" [ 1053.741369] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1053.741666] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1053.741666] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c6f44b-ff62-080f-f40d-ecfff43c0573" [ 1053.741666] env[62383]: _type = "HttpNfcLease" [ 1053.741666] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1053.742398] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f192ee72-ff3c-44c6-8d75-4c63fa086cd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.749431] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52af24bd-25eb-6010-fc0a-1beca640061b/disk-0.vmdk from lease info. 
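The inventory report above for provider 60615f54-0557-436e-a486-87505bffb4c7 lists total, reserved and allocation_ratio per resource class; the schedulable capacity Placement works with is (total - reserved) * allocation_ratio, with max_unit additionally capping any single allocation. Plugging in the logged numbers:

    inventory = {  # values copied from the report above
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

So this host can overcommit up to 192 vCPUs but offers its memory and disk at a 1.0 ratio.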
{{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1053.749610] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52af24bd-25eb-6010-fc0a-1beca640061b/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1053.812625] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452277, 'name': Rename_Task, 'duration_secs': 0.204615} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.812892] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.813137] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0456407c-c6c2-4589-9e80-3b48999f1571 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.819313] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1053.819313] env[62383]: value = "task-2452278" [ 1053.819313] env[62383]: _type = "Task" [ 1053.819313] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.826459] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452278, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.861196] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d2c3b7e9-0cec-4391-b64e-f9af3ff59820 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.910185] env[62383]: DEBUG nova.compute.manager [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1053.934059] env[62383]: DEBUG nova.virt.hardware [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1053.934304] env[62383]: DEBUG nova.virt.hardware [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1053.934462] env[62383]: DEBUG nova.virt.hardware [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1053.934645] env[62383]: DEBUG nova.virt.hardware [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1053.934793] env[62383]: DEBUG nova.virt.hardware [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1053.934941] env[62383]: DEBUG nova.virt.hardware [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1053.935170] env[62383]: DEBUG nova.virt.hardware [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1053.935458] env[62383]: DEBUG nova.virt.hardware [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1053.935498] env[62383]: DEBUG nova.virt.hardware [None 
req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1053.935637] env[62383]: DEBUG nova.virt.hardware [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1053.935806] env[62383]: DEBUG nova.virt.hardware [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1053.936608] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb3dd7c-5e6e-4dbc-8d1c-28fa41f9f9f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.942491] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b72d48-dd35-8717-f80a-9beac874b618, 'name': SearchDatastore_Task, 'duration_secs': 0.021711} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.943126] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.943362] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1053.943619] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.943767] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.943950] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e 
tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1053.946153] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-124cc24c-81b4-4472-b8b3-981995a762f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.949585] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d565fc26-c67f-4c00-af09-c45a8c98e252 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.964065] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1053.964259] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1053.964940] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11ac87f4-ac4c-47bb-90bd-27bcad2509c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.969979] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1053.969979] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52aa916e-fd75-f692-ca78-be0de70b16c8" [ 1053.969979] env[62383]: _type = "Task" [ 1053.969979] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.978083] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52aa916e-fd75-f692-ca78-be0de70b16c8, 'name': SearchDatastore_Task} progress is 0%. 
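The block above serializes access to the datastore image cache: a lock named after "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk" is acquired, the cache directory is created if missing, and SearchDatastore_Task checks whether the cached VMDK is already present, so only a missing image would be fetched. A compressed sketch of that check-under-lock pattern with oslo.concurrency; cache_path, image_exists and fetch_image are placeholders, not Nova helpers:

    from oslo_concurrency import lockutils

    def ensure_cached_image(cache_path, image_exists, fetch_image):
        """Fetch an image into the datastore cache at most once.

        image_exists and fetch_image are hypothetical callables standing
        in for the SearchDatastore_Task check and the actual download.
        """
        # The lock name mirrors the datastore path, so concurrent builds
        # of the same image on this host serialize on the same lock.
        with lockutils.lock(cache_path):
            if not image_exists(cache_path):
                fetch_image(cache_path)
        return cache_path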
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.161492] env[62383]: DEBUG nova.compute.manager [req-c39d2853-7bfd-4a9d-8d8f-6a3bbec21de9 req-23f0e801-e511-49b8-87f6-11fcd3c73f39 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Received event network-vif-plugged-4c1ffea5-d09f-4f98-bbe4-f02d40cad88d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1054.161716] env[62383]: DEBUG oslo_concurrency.lockutils [req-c39d2853-7bfd-4a9d-8d8f-6a3bbec21de9 req-23f0e801-e511-49b8-87f6-11fcd3c73f39 service nova] Acquiring lock "981aa014-4861-4ab3-94e3-c113eec9bf29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.161921] env[62383]: DEBUG oslo_concurrency.lockutils [req-c39d2853-7bfd-4a9d-8d8f-6a3bbec21de9 req-23f0e801-e511-49b8-87f6-11fcd3c73f39 service nova] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.162519] env[62383]: DEBUG oslo_concurrency.lockutils [req-c39d2853-7bfd-4a9d-8d8f-6a3bbec21de9 req-23f0e801-e511-49b8-87f6-11fcd3c73f39 service nova] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.162769] env[62383]: DEBUG nova.compute.manager [req-c39d2853-7bfd-4a9d-8d8f-6a3bbec21de9 req-23f0e801-e511-49b8-87f6-11fcd3c73f39 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] No waiting events found dispatching network-vif-plugged-4c1ffea5-d09f-4f98-bbe4-f02d40cad88d {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1054.162978] env[62383]: WARNING nova.compute.manager [req-c39d2853-7bfd-4a9d-8d8f-6a3bbec21de9 req-23f0e801-e511-49b8-87f6-11fcd3c73f39 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Received unexpected event network-vif-plugged-4c1ffea5-d09f-4f98-bbe4-f02d40cad88d for instance with vm_state building and task_state spawning. [ 1054.175983] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.176535] env[62383]: DEBUG nova.compute.manager [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Start building networks asynchronously for instance. 
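The network-vif-plugged event above arrives from Neutron through the external-events API; the compute manager takes the per-instance "-events" lock, looks for a waiter registered for that exact event, and logs "No waiting events found dispatching ..." plus the WARNING when nothing is currently blocked on it (here the instance is still building, so the event is simply early). A toy version of that pop-and-dispatch bookkeeping, much simplified from the real InstanceEvents class:

    import threading

    class InstanceEvents:
        """Simplified per-instance event registry (illustration only)."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            with self._lock:
                ev = threading.Event()
                self._waiters[(instance_uuid, event_name)] = ev
                return ev        # a build thread would wait() on this

        def pop_and_signal(self, instance_uuid, event_name):
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:
                print('No waiting events found dispatching', event_name)
                return False
            ev.set()             # wakes whoever is blocked on the event
            return True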
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1054.179333] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.404s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.180892] env[62383]: INFO nova.compute.claims [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1054.249190] env[62383]: DEBUG nova.network.neutron [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Successfully updated port: 4c1ffea5-d09f-4f98-bbe4-f02d40cad88d {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1054.330189] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452278, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.481906] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52aa916e-fd75-f692-ca78-be0de70b16c8, 'name': SearchDatastore_Task, 'duration_secs': 0.014724} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.483064] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-732e1a7f-043d-4d55-be51-ecd2ba944067 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.489471] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1054.489471] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ce656e-6a94-c23b-7a52-7d949bb67ec3" [ 1054.489471] env[62383]: _type = "Task" [ 1054.489471] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.499240] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ce656e-6a94-c23b-7a52-7d949bb67ec3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.618398] env[62383]: INFO nova.compute.manager [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Resuming [ 1054.619517] env[62383]: DEBUG nova.objects.instance [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lazy-loading 'flavor' on Instance uuid c56464dd-63af-4686-b666-d0ac2df01ec1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.687275] env[62383]: DEBUG nova.compute.utils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1054.690440] env[62383]: DEBUG nova.compute.manager [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1054.694627] env[62383]: DEBUG nova.network.neutron [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1054.747271] env[62383]: DEBUG nova.policy [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cce49ef14f3a474c9448607425da3dc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2439f3d802f34027b12d50f242a54ba3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1054.751599] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.751769] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.751924] env[62383]: DEBUG nova.network.neutron [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Building network info cache for instance 
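The policy line above shows the request context (roles reader and member, no admin) failing the network:attach_external_network check, meaning this user would not be allowed to attach the port to an external network; the actual rule is whatever the deployment configured through oslo.policy. A stripped-down, purely illustrative role gate in the same spirit, not the real rule definition:

    def may_attach_external_network(credentials, required_role='admin'):
        """Illustrative stand-in for the policy check; the real rule for
        network:attach_external_network is deployment-configurable."""
        return required_role in credentials.get('roles', [])

    creds = {'roles': ['reader', 'member'],
             'project_id': '2439f3d802f34027b12d50f242a54ba3'}
    print(may_attach_external_network(creds))   # False, matching the failed check above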
{{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1054.835379] env[62383]: DEBUG oslo_vmware.api [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452278, 'name': PowerOnVM_Task, 'duration_secs': 0.519878} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.835758] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1054.836257] env[62383]: INFO nova.compute.manager [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Took 8.18 seconds to spawn the instance on the hypervisor. [ 1054.836598] env[62383]: DEBUG nova.compute.manager [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.837718] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62988608-6b50-4f31-adb3-782d63a02126 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.000518] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ce656e-6a94-c23b-7a52-7d949bb67ec3, 'name': SearchDatastore_Task, 'duration_secs': 0.012349} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.000793] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.001286] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9f8e346e-815c-492d-84a9-00ebdca3bcc3/9f8e346e-815c-492d-84a9-00ebdca3bcc3.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1055.001684] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b90c060-e727-460f-b2a5-bbce619de89c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.008745] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1055.008745] env[62383]: value = "task-2452279" [ 1055.008745] env[62383]: _type = "Task" [ 1055.008745] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.019163] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452279, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.068618] env[62383]: DEBUG nova.network.neutron [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Successfully created port: e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1055.189026] env[62383]: DEBUG nova.compute.manager [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1055.306512] env[62383]: DEBUG nova.network.neutron [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1055.364691] env[62383]: INFO nova.compute.manager [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Took 16.88 seconds to build instance. [ 1055.445205] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756e6915-9762-4030-bec3-6145e580fc9b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.453949] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778507b8-18f3-4085-baf7-ba1834f605ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.495993] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0606c2-0d4c-45aa-921c-d50c440f3724 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.506604] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9877546d-3eb2-4c79-9884-667338b0495f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.524908] env[62383]: DEBUG nova.compute.provider_tree [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.529593] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452279, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.555624] env[62383]: DEBUG nova.network.neutron [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Updating instance_info_cache with network_info: [{"id": "4c1ffea5-d09f-4f98-bbe4-f02d40cad88d", "address": "fa:16:3e:56:8f:e8", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c1ffea5-d0", "ovs_interfaceid": "4c1ffea5-d09f-4f98-bbe4-f02d40cad88d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.868112] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cf11e323-d3a8-4bde-b84e-09a7fcccf146 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.393s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.025773] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452279, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.756176} completed successfully. 
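The instance_info_cache update above carries the full network_info for port 4c1ffea5-d09f-4f98-bbe4-f02d40cad88d: a fixed IP of 192.168.128.6 on a /28 subnet, MTU 8950, device tap4c1ffea5-d0, and an OVS/NSX binding with segmentation_id 72. A short snippet pulling those fields out of an abbreviated copy of that structure:

    # Abbreviated from the network_info logged above.
    network_info = [{
        "id": "4c1ffea5-d09f-4f98-bbe4-f02d40cad88d",
        "address": "fa:16:3e:56:8f:e8",
        "network": {
            "bridge": "br-int",
            "subnets": [{"cidr": "192.168.128.0/28",
                         "ips": [{"address": "192.168.128.6", "type": "fixed"}]}],
            "meta": {"mtu": 8950},
        },
        "details": {"segmentation_id": 72,
                    "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6"},
        "devname": "tap4c1ffea5-d0",
    }]

    vif = network_info[0]
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["devname"], fixed_ips,
          vif["network"]["meta"]["mtu"], vif["details"]["segmentation_id"])
    # tap4c1ffea5-d0 ['192.168.128.6'] 8950 72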
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.026861] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 9f8e346e-815c-492d-84a9-00ebdca3bcc3/9f8e346e-815c-492d-84a9-00ebdca3bcc3.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1056.027145] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1056.027500] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8fb38357-6841-4ac1-8f05-715163939c1a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.031900] env[62383]: DEBUG nova.scheduler.client.report [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1056.038318] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1056.038318] env[62383]: value = "task-2452280" [ 1056.038318] env[62383]: _type = "Task" [ 1056.038318] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.048839] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452280, 'name': ExtendVirtualDisk_Task} progress is 0%. 
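The sequence above is the usual sparse-image spawn path: the cached cac3b430-a1d5-4ad1-92ec-34c2261779a8 VMDK is copied into the instance folder and the root disk is then extended to 1048576, which lines up with the m1.nano flavor's root_gb=1 expressed in KiB (1 GiB = 1024 * 1024 KiB). The same conversion spelled out, using names that mirror the log rather than any actual helper:

    root_gb = 1                        # m1.nano flavor from the log
    KIB_PER_GIB = 1024 * 1024          # i.e. units.Mi in oslo.utils
    requested_kb = root_gb * KIB_PER_GIB
    print(requested_kb)                # 1048576, the size seen in ExtendVirtualDisk_Task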
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.058723] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.059151] env[62383]: DEBUG nova.compute.manager [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Instance network_info: |[{"id": "4c1ffea5-d09f-4f98-bbe4-f02d40cad88d", "address": "fa:16:3e:56:8f:e8", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c1ffea5-d0", "ovs_interfaceid": "4c1ffea5-d09f-4f98-bbe4-f02d40cad88d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1056.059954] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:8f:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c1ffea5-d09f-4f98-bbe4-f02d40cad88d', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1056.076643] env[62383]: DEBUG oslo.service.loopingcall [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1056.080493] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1056.080493] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69559e96-8b04-4c6a-8518-d0b52c8482ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.100224] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1056.100224] env[62383]: value = "task-2452281" [ 1056.100224] env[62383]: _type = "Task" [ 1056.100224] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.108014] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452281, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.129795] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.130051] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquired lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.130222] env[62383]: DEBUG nova.network.neutron [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1056.200369] env[62383]: DEBUG nova.compute.manager [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1056.231139] env[62383]: DEBUG nova.compute.manager [req-260f1992-07c8-442a-8604-a48a9775b688 req-a278e7c3-23b7-4271-af98-7ff3a7fa8f03 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Received event network-changed-4c1ffea5-d09f-4f98-bbe4-f02d40cad88d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1056.231472] env[62383]: DEBUG nova.compute.manager [req-260f1992-07c8-442a-8604-a48a9775b688 req-a278e7c3-23b7-4271-af98-7ff3a7fa8f03 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Refreshing instance network info cache due to event network-changed-4c1ffea5-d09f-4f98-bbe4-f02d40cad88d. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1056.231778] env[62383]: DEBUG oslo_concurrency.lockutils [req-260f1992-07c8-442a-8604-a48a9775b688 req-a278e7c3-23b7-4271-af98-7ff3a7fa8f03 service nova] Acquiring lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.231955] env[62383]: DEBUG oslo_concurrency.lockutils [req-260f1992-07c8-442a-8604-a48a9775b688 req-a278e7c3-23b7-4271-af98-7ff3a7fa8f03 service nova] Acquired lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.232170] env[62383]: DEBUG nova.network.neutron [req-260f1992-07c8-442a-8604-a48a9775b688 req-a278e7c3-23b7-4271-af98-7ff3a7fa8f03 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Refreshing network info cache for port 4c1ffea5-d09f-4f98-bbe4-f02d40cad88d {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1056.241329] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1056.241575] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1056.241736] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1056.241918] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1056.242104] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1056.242329] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 
tempest-ServerActionsTestOtherB-255557430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1056.242487] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1056.242647] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1056.242815] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1056.242979] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1056.243871] env[62383]: DEBUG nova.virt.hardware [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1056.245052] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d450a8-f54e-44bb-b904-404e0544afb0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.258342] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5fa648-badb-48b0-a5b4-cd96ab6ac588 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.540755] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.541409] env[62383]: DEBUG nova.compute.manager [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1056.553658] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452280, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064924} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.554012] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1056.554725] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473a2d81-e4f7-4c9b-b6d9-5bfa997f0582 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.578018] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 9f8e346e-815c-492d-84a9-00ebdca3bcc3/9f8e346e-815c-492d-84a9-00ebdca3bcc3.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1056.578763] env[62383]: DEBUG nova.network.neutron [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Successfully updated port: e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1056.579940] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d79729e-be03-42e7-b8e4-1d6f1640d3bc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.606535] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1056.606535] env[62383]: value = "task-2452282" [ 1056.606535] env[62383]: _type = "Task" [ 1056.606535] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.610893] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452281, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.619168] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452282, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.880215] env[62383]: DEBUG nova.network.neutron [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updating instance_info_cache with network_info: [{"id": "0afca8d2-b019-4a25-af28-7061dbf32e28", "address": "fa:16:3e:e8:5a:70", "network": {"id": "c6904ede-b95c-4913-86cf-9512049bcb8f", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1603053986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9f2dba3783e48968554ca75be01cd5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b36c5ae6-c344-4bd1-8239-29128e2bbfbf", "external-id": "nsx-vlan-transportzone-214", "segmentation_id": 214, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0afca8d2-b0", "ovs_interfaceid": "0afca8d2-b019-4a25-af28-7061dbf32e28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.928842] env[62383]: DEBUG nova.compute.manager [req-3a516bab-e555-447c-99de-a798b99fcb64 req-cc6fa960-e7a0-4d3a-a3aa-84091a0745c3 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received event network-changed-2a9eb423-4347-4116-825d-0afad0e10ad1 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1056.929113] env[62383]: DEBUG nova.compute.manager [req-3a516bab-e555-447c-99de-a798b99fcb64 req-cc6fa960-e7a0-4d3a-a3aa-84091a0745c3 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Refreshing instance network info cache due to event network-changed-2a9eb423-4347-4116-825d-0afad0e10ad1. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1056.929442] env[62383]: DEBUG oslo_concurrency.lockutils [req-3a516bab-e555-447c-99de-a798b99fcb64 req-cc6fa960-e7a0-4d3a-a3aa-84091a0745c3 service nova] Acquiring lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.929602] env[62383]: DEBUG oslo_concurrency.lockutils [req-3a516bab-e555-447c-99de-a798b99fcb64 req-cc6fa960-e7a0-4d3a-a3aa-84091a0745c3 service nova] Acquired lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.929830] env[62383]: DEBUG nova.network.neutron [req-3a516bab-e555-447c-99de-a798b99fcb64 req-cc6fa960-e7a0-4d3a-a3aa-84091a0745c3 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Refreshing network info cache for port 2a9eb423-4347-4116-825d-0afad0e10ad1 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1057.049980] env[62383]: DEBUG nova.compute.utils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1057.051578] env[62383]: DEBUG nova.compute.manager [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1057.052133] env[62383]: DEBUG nova.network.neutron [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1057.097351] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.097610] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.097786] env[62383]: DEBUG nova.network.neutron [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1057.100833] env[62383]: DEBUG nova.policy [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Policy check for network:attach_external_network failed with 
credentials {'is_admin': False, 'user_id': 'c294a0cc4e6446afabfb754ba2437a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83304cfb5deb443880252c194e249565', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1057.102826] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "690dca62-cafb-40f7-92f0-9bbfde3467b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.103186] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "690dca62-cafb-40f7-92f0-9bbfde3467b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.103483] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "690dca62-cafb-40f7-92f0-9bbfde3467b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.103754] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "690dca62-cafb-40f7-92f0-9bbfde3467b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.103910] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "690dca62-cafb-40f7-92f0-9bbfde3467b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.111654] env[62383]: INFO nova.compute.manager [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Terminating instance [ 1057.114526] env[62383]: DEBUG nova.network.neutron [req-260f1992-07c8-442a-8604-a48a9775b688 req-a278e7c3-23b7-4271-af98-7ff3a7fa8f03 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Updated VIF entry in instance network info cache for port 4c1ffea5-d09f-4f98-bbe4-f02d40cad88d. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1057.115741] env[62383]: DEBUG nova.network.neutron [req-260f1992-07c8-442a-8604-a48a9775b688 req-a278e7c3-23b7-4271-af98-7ff3a7fa8f03 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Updating instance_info_cache with network_info: [{"id": "4c1ffea5-d09f-4f98-bbe4-f02d40cad88d", "address": "fa:16:3e:56:8f:e8", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c1ffea5-d0", "ovs_interfaceid": "4c1ffea5-d09f-4f98-bbe4-f02d40cad88d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.139531] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452282, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.139809] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452281, 'name': CreateVM_Task, 'duration_secs': 0.662592} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.142585] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1057.143653] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.143851] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.144202] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1057.148020] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-180a47f2-6010-4d68-8b4b-93309bf675eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.150318] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1057.150318] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5265b3df-cf67-1579-b250-182bd492021c" [ 1057.150318] env[62383]: _type = "Task" [ 1057.150318] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.162423] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5265b3df-cf67-1579-b250-182bd492021c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.385018] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Releasing lock "refresh_cache-c56464dd-63af-4686-b666-d0ac2df01ec1" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.385376] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35754eb6-6457-4aee-80a0-6babe9c36fdd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.392570] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Resuming the VM {{(pid=62383) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1057.392828] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c33efaa-205b-4adc-8a46-02d108be3432 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.399975] env[62383]: DEBUG oslo_vmware.api [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1057.399975] env[62383]: value = "task-2452283" [ 1057.399975] env[62383]: _type = "Task" [ 1057.399975] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.413020] env[62383]: DEBUG oslo_vmware.api [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452283, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.420470] env[62383]: DEBUG nova.network.neutron [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Successfully created port: 3e70fa62-b81f-4cf7-950b-772addf79f9c {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1057.556580] env[62383]: DEBUG nova.compute.manager [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1057.618705] env[62383]: DEBUG oslo_concurrency.lockutils [req-260f1992-07c8-442a-8604-a48a9775b688 req-a278e7c3-23b7-4271-af98-7ff3a7fa8f03 service nova] Releasing lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.628602] env[62383]: DEBUG nova.compute.manager [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1057.628860] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1057.629213] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452282, 'name': ReconfigVM_Task, 'duration_secs': 0.592572} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.629930] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004c79ec-a84d-468e-899a-4c8729eda606 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.632893] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 9f8e346e-815c-492d-84a9-00ebdca3bcc3/9f8e346e-815c-492d-84a9-00ebdca3bcc3.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1057.634227] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_format': None, 'encryption_options': None, 'size': 0, 'device_type': 'disk', 'encryption_secret_uuid': None, 'device_name': '/dev/sda', 'encrypted': False, 'guest_format': None, 'boot_index': 0, 'disk_bus': None, 'image_id': 'cac3b430-a1d5-4ad1-92ec-34c2261779a8'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '0d32e10a-0e8d-4123-b8ce-95a48c3bab54', 'device_type': None, 'delete_on_termination': False, 'mount_device': '/dev/sdb', 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496576', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'name': 'volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 
'9f8e346e-815c-492d-84a9-00ebdca3bcc3', 'attached_at': '', 'detached_at': '', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'serial': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1'}, 'boot_index': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62383) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1057.634429] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Volume attach. Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1057.634617] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496576', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'name': 'volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9f8e346e-815c-492d-84a9-00ebdca3bcc3', 'attached_at': '', 'detached_at': '', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'serial': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1057.635369] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435d4941-8209-4467-a1eb-8e2d5bd7df6c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.643860] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1057.656290] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-439922f4-f297-4b56-87ec-4ad9cd1e8ab1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.659702] env[62383]: DEBUG nova.network.neutron [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1057.665834] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feeb7e5f-8b73-481b-9eca-84fd5783163d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.675416] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5265b3df-cf67-1579-b250-182bd492021c, 'name': SearchDatastore_Task, 'duration_secs': 0.015414} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.689260] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.689630] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1057.689768] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.690084] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.690170] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1057.690454] env[62383]: DEBUG oslo_vmware.api [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1057.690454] env[62383]: value = "task-2452284" [ 1057.690454] env[62383]: _type = "Task" [ 1057.690454] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.697829] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1/volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.700394] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd221429-6871-4683-a56f-d79a6e08f9a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.702289] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9489e9ce-cedd-4f67-882e-d8d9f4a4d8b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.732021] env[62383]: DEBUG oslo_vmware.api [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452284, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.732021] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1057.732021] env[62383]: value = "task-2452285" [ 1057.732021] env[62383]: _type = "Task" [ 1057.732021] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.732021] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1057.732021] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1057.732021] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e976cf79-b7ca-4e3e-a786-2537b7b64705 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.740457] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1057.740457] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5298776e-257b-d2dd-a40b-6ca05fe47f6d" [ 1057.740457] env[62383]: _type = "Task" [ 1057.740457] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.743908] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452285, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.755126] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5298776e-257b-d2dd-a40b-6ca05fe47f6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.758813] env[62383]: DEBUG nova.network.neutron [req-3a516bab-e555-447c-99de-a798b99fcb64 req-cc6fa960-e7a0-4d3a-a3aa-84091a0745c3 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updated VIF entry in instance network info cache for port 2a9eb423-4347-4116-825d-0afad0e10ad1. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1057.759258] env[62383]: DEBUG nova.network.neutron [req-3a516bab-e555-447c-99de-a798b99fcb64 req-cc6fa960-e7a0-4d3a-a3aa-84091a0745c3 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.850534] env[62383]: DEBUG nova.network.neutron [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating instance_info_cache with network_info: [{"id": "e822f89d-516c-4eab-bd54-f1369994f514", "address": "fa:16:3e:70:0a:80", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape822f89d-51", "ovs_interfaceid": "e822f89d-516c-4eab-bd54-f1369994f514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.913468] env[62383]: DEBUG oslo_vmware.api [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452283, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.213028] env[62383]: DEBUG oslo_vmware.api [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452284, 'name': PowerOffVM_Task, 'duration_secs': 0.281452} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.213028] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.213028] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1058.213260] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f61464d9-4aad-4c44-bfce-f9792c9dfead {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.240845] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452285, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.257479] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5298776e-257b-d2dd-a40b-6ca05fe47f6d, 'name': SearchDatastore_Task, 'duration_secs': 0.018418} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.258448] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c72d1176-7432-47e4-8a4d-efb0f7ec4aab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.262613] env[62383]: DEBUG oslo_concurrency.lockutils [req-3a516bab-e555-447c-99de-a798b99fcb64 req-cc6fa960-e7a0-4d3a-a3aa-84091a0745c3 service nova] Releasing lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.269686] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1058.269686] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52779e3a-51c8-dcc4-96c5-09b4daed0c30" [ 1058.269686] env[62383]: _type = "Task" [ 1058.269686] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.280071] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52779e3a-51c8-dcc4-96c5-09b4daed0c30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.296685] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1058.296920] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1058.297129] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Deleting the datastore file [datastore2] 690dca62-cafb-40f7-92f0-9bbfde3467b6 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1058.297406] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91669779-dfd3-48e2-bdb4-0e1a7f8d1702 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.303550] env[62383]: DEBUG oslo_vmware.api [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1058.303550] env[62383]: value = "task-2452287" [ 1058.303550] env[62383]: _type = "Task" [ 1058.303550] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.305910] env[62383]: DEBUG nova.compute.manager [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received event network-vif-plugged-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1058.306131] env[62383]: DEBUG oslo_concurrency.lockutils [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] Acquiring lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.306342] env[62383]: DEBUG oslo_concurrency.lockutils [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] Lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.306508] env[62383]: DEBUG oslo_concurrency.lockutils [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] Lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.306676] env[62383]: DEBUG nova.compute.manager [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] No waiting events found dispatching network-vif-plugged-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1058.306842] env[62383]: WARNING nova.compute.manager [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received unexpected event network-vif-plugged-e822f89d-516c-4eab-bd54-f1369994f514 for instance with vm_state building and task_state spawning. [ 1058.306997] env[62383]: DEBUG nova.compute.manager [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received event network-changed-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1058.307158] env[62383]: DEBUG nova.compute.manager [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Refreshing instance network info cache due to event network-changed-e822f89d-516c-4eab-bd54-f1369994f514. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1058.307325] env[62383]: DEBUG oslo_concurrency.lockutils [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] Acquiring lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1058.316353] env[62383]: DEBUG oslo_vmware.api [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452287, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.353787] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.354188] env[62383]: DEBUG nova.compute.manager [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Instance network_info: |[{"id": "e822f89d-516c-4eab-bd54-f1369994f514", "address": "fa:16:3e:70:0a:80", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape822f89d-51", "ovs_interfaceid": "e822f89d-516c-4eab-bd54-f1369994f514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1058.354561] env[62383]: DEBUG oslo_concurrency.lockutils [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] Acquired lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.354722] env[62383]: DEBUG nova.network.neutron [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Refreshing network info cache for port e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1058.356145] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 
tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:0a:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e822f89d-516c-4eab-bd54-f1369994f514', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1058.363883] env[62383]: DEBUG oslo.service.loopingcall [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1058.364977] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1058.365245] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f35e1e9-1b39-4cdf-92d4-1377b5d891d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.386877] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1058.386877] env[62383]: value = "task-2452288" [ 1058.386877] env[62383]: _type = "Task" [ 1058.386877] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.395787] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452288, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.409874] env[62383]: DEBUG oslo_vmware.api [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452283, 'name': PowerOnVM_Task, 'duration_secs': 0.872573} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.410148] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Resumed the VM {{(pid=62383) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1058.410358] env[62383]: DEBUG nova.compute.manager [None req-fba66989-bee8-451b-b984-11e7a3cc3a05 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1058.411206] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3131560d-067e-4762-b0da-2c354787e2eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.568911] env[62383]: DEBUG nova.compute.manager [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1058.603039] env[62383]: DEBUG nova.virt.hardware [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1058.603039] env[62383]: DEBUG nova.virt.hardware [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1058.603039] env[62383]: DEBUG nova.virt.hardware [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1058.603039] env[62383]: DEBUG nova.virt.hardware [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1058.603519] env[62383]: DEBUG nova.virt.hardware [None 
req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1058.603875] env[62383]: DEBUG nova.virt.hardware [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1058.606138] env[62383]: DEBUG nova.virt.hardware [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1058.606138] env[62383]: DEBUG nova.virt.hardware [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1058.606138] env[62383]: DEBUG nova.virt.hardware [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1058.606138] env[62383]: DEBUG nova.virt.hardware [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1058.606138] env[62383]: DEBUG nova.virt.hardware [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1058.606138] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f865fd9-4492-4efb-9d38-f7604f954647 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.614370] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b450114f-6c55-467c-863a-cb1266f2255c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.741323] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452285, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.781588] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52779e3a-51c8-dcc4-96c5-09b4daed0c30, 'name': SearchDatastore_Task, 'duration_secs': 0.025338} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.781925] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.782203] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 981aa014-4861-4ab3-94e3-c113eec9bf29/981aa014-4861-4ab3-94e3-c113eec9bf29.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1058.782592] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3237601-bcac-4f3b-afde-9ea892c4c607 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.789060] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1058.789060] env[62383]: value = "task-2452289" [ 1058.789060] env[62383]: _type = "Task" [ 1058.789060] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.798017] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452289, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.813815] env[62383]: DEBUG oslo_vmware.api [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452287, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.487659} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.814481] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1058.814481] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1058.814603] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1058.814796] env[62383]: INFO nova.compute.manager [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1058.814987] env[62383]: DEBUG oslo.service.loopingcall [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1058.815207] env[62383]: DEBUG nova.compute.manager [-] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1058.815306] env[62383]: DEBUG nova.network.neutron [-] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1058.897340] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452288, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.095607] env[62383]: DEBUG nova.network.neutron [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Successfully updated port: 3e70fa62-b81f-4cf7-950b-772addf79f9c {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1059.217876] env[62383]: DEBUG nova.network.neutron [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updated VIF entry in instance network info cache for port e822f89d-516c-4eab-bd54-f1369994f514. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1059.218433] env[62383]: DEBUG nova.network.neutron [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating instance_info_cache with network_info: [{"id": "e822f89d-516c-4eab-bd54-f1369994f514", "address": "fa:16:3e:70:0a:80", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape822f89d-51", "ovs_interfaceid": "e822f89d-516c-4eab-bd54-f1369994f514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.242916] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452285, 'name': ReconfigVM_Task, 'duration_secs': 1.05536} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.242916] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfigured VM instance instance-0000005c to attach disk [datastore2] volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1/volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1059.247984] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73823027-c1fc-4dfa-a5ee-a7955c9703fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.264765] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1059.264765] env[62383]: value = "task-2452290" [ 1059.264765] env[62383]: _type = "Task" [ 1059.264765] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.274363] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452290, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.299323] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452289, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.403735] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452288, 'name': CreateVM_Task, 'duration_secs': 0.581857} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.403924] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1059.404663] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.404831] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.405181] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1059.405453] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-042e716a-dc8d-488a-bc78-bfc90b76e820 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.410389] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1059.410389] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f51794-6750-4562-7bfa-41362466f426" [ 1059.410389] env[62383]: _type = "Task" [ 1059.410389] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.420299] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f51794-6750-4562-7bfa-41362466f426, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.598909] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "refresh_cache-de24aca8-30fc-453e-b192-b6bb115876ef" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.599104] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "refresh_cache-de24aca8-30fc-453e-b192-b6bb115876ef" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.599239] env[62383]: DEBUG nova.network.neutron [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1059.691731] env[62383]: DEBUG nova.network.neutron [-] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.721100] env[62383]: DEBUG oslo_concurrency.lockutils [req-eb37279b-6acb-4661-b831-8b977496ba17 req-702d42c4-5cac-4558-b0a4-7620045c391b service nova] Releasing lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.775835] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452290, 'name': ReconfigVM_Task, 'duration_secs': 0.37178} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.776260] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496576', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'name': 'volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9f8e346e-815c-492d-84a9-00ebdca3bcc3', 'attached_at': '', 'detached_at': '', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'serial': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1059.776851] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d252a97e-d656-49dc-83d3-99d4d0403ec1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.783462] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1059.783462] env[62383]: value = "task-2452291" [ 1059.783462] env[62383]: _type = "Task" [ 1059.783462] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.792041] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452291, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.799608] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452289, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766696} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.799840] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 981aa014-4861-4ab3-94e3-c113eec9bf29/981aa014-4861-4ab3-94e3-c113eec9bf29.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1059.800094] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1059.800374] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db1cc17f-a277-4db9-8c99-f11d8c9e880d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.806915] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1059.806915] env[62383]: value = "task-2452292" [ 1059.806915] env[62383]: _type = "Task" [ 1059.806915] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.815499] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452292, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.922292] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f51794-6750-4562-7bfa-41362466f426, 'name': SearchDatastore_Task, 'duration_secs': 0.066354} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.923013] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.923013] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1059.923184] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.923251] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.923404] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1059.923733] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e3dd5eb-838c-4e07-b770-9a888999ba6b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.937171] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1059.937415] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1059.938291] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eca8e159-afd5-4f43-b3e6-827a4f5b1d58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.944436] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1059.944436] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522ad668-d8de-0d69-a330-7891312430df" [ 1059.944436] env[62383]: _type = "Task" [ 1059.944436] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.952708] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522ad668-d8de-0d69-a330-7891312430df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.146945] env[62383]: DEBUG nova.network.neutron [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1060.194482] env[62383]: INFO nova.compute.manager [-] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Took 1.38 seconds to deallocate network for instance. [ 1060.296117] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452291, 'name': Rename_Task, 'duration_secs': 0.311261} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.296407] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1060.296724] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1294c768-431d-4b15-9de6-d806c013dae5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.304068] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1060.304068] env[62383]: value = "task-2452293" [ 1060.304068] env[62383]: _type = "Task" [ 1060.304068] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.314377] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452293, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.317177] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452292, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122515} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.317449] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1060.318247] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00fa7a55-95e7-4adb-bcdb-1086d05fb76c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.341591] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 981aa014-4861-4ab3-94e3-c113eec9bf29/981aa014-4861-4ab3-94e3-c113eec9bf29.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1060.342836] env[62383]: DEBUG nova.network.neutron [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Updating instance_info_cache with network_info: [{"id": "3e70fa62-b81f-4cf7-950b-772addf79f9c", "address": "fa:16:3e:cc:4d:0a", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e70fa62-b8", "ovs_interfaceid": "3e70fa62-b81f-4cf7-950b-772addf79f9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.345097] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3aceabac-a421-4219-9eaa-5a3d56b5afad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.361425] env[62383]: DEBUG nova.compute.manager [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Received event network-vif-plugged-3e70fa62-b81f-4cf7-950b-772addf79f9c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1060.361666] env[62383]: DEBUG oslo_concurrency.lockutils [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] Acquiring lock "de24aca8-30fc-453e-b192-b6bb115876ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.361888] env[62383]: DEBUG oslo_concurrency.lockutils [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] Lock "de24aca8-30fc-453e-b192-b6bb115876ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.362068] env[62383]: DEBUG oslo_concurrency.lockutils [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] Lock "de24aca8-30fc-453e-b192-b6bb115876ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1060.362238] env[62383]: DEBUG nova.compute.manager [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] No waiting events found dispatching network-vif-plugged-3e70fa62-b81f-4cf7-950b-772addf79f9c {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1060.362401] env[62383]: WARNING nova.compute.manager [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Received unexpected event network-vif-plugged-3e70fa62-b81f-4cf7-950b-772addf79f9c for instance with vm_state building and task_state spawning. [ 1060.362558] env[62383]: DEBUG nova.compute.manager [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Received event network-changed-3e70fa62-b81f-4cf7-950b-772addf79f9c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1060.362709] env[62383]: DEBUG nova.compute.manager [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Refreshing instance network info cache due to event network-changed-3e70fa62-b81f-4cf7-950b-772addf79f9c. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1060.362872] env[62383]: DEBUG oslo_concurrency.lockutils [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] Acquiring lock "refresh_cache-de24aca8-30fc-453e-b192-b6bb115876ef" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1060.363949] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "refresh_cache-de24aca8-30fc-453e-b192-b6bb115876ef" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.364237] env[62383]: DEBUG nova.compute.manager [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Instance network_info: |[{"id": "3e70fa62-b81f-4cf7-950b-772addf79f9c", "address": "fa:16:3e:cc:4d:0a", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e70fa62-b8", "ovs_interfaceid": "3e70fa62-b81f-4cf7-950b-772addf79f9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1060.364749] env[62383]: DEBUG oslo_concurrency.lockutils [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] Acquired lock "refresh_cache-de24aca8-30fc-453e-b192-b6bb115876ef" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.364923] env[62383]: DEBUG nova.network.neutron [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Refreshing network info cache for port 3e70fa62-b81f-4cf7-950b-772addf79f9c {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1060.366023] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:4d:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '01fe2e08-46f6-4cee-aefd-934461f8077d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'3e70fa62-b81f-4cf7-950b-772addf79f9c', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1060.373538] env[62383]: DEBUG oslo.service.loopingcall [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1060.374907] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1060.375686] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce367cfc-4988-4060-82b7-e35413e7bc99 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.394444] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1060.394444] env[62383]: value = "task-2452294" [ 1060.394444] env[62383]: _type = "Task" [ 1060.394444] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.400500] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1060.400500] env[62383]: value = "task-2452295" [ 1060.400500] env[62383]: _type = "Task" [ 1060.400500] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.404427] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452294, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.416241] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452295, 'name': CreateVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.456488] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522ad668-d8de-0d69-a330-7891312430df, 'name': SearchDatastore_Task, 'duration_secs': 0.02543} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.457558] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08d11905-caea-4ff1-a9b4-cedc3e8db839 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.463729] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1060.463729] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52634115-118f-4678-2b69-26d0b59b50cb" [ 1060.463729] env[62383]: _type = "Task" [ 1060.463729] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.474621] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52634115-118f-4678-2b69-26d0b59b50cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.701207] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.701523] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.701761] env[62383]: DEBUG nova.objects.instance [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lazy-loading 'resources' on Instance uuid 690dca62-cafb-40f7-92f0-9bbfde3467b6 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.814429] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452293, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.905208] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452294, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.914475] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452295, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.974852] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52634115-118f-4678-2b69-26d0b59b50cb, 'name': SearchDatastore_Task, 'duration_secs': 0.01746} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.975181] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1060.975485] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 0392d059-57ea-49fb-84d2-b71cbca840db/0392d059-57ea-49fb-84d2-b71cbca840db.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1060.975764] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98fb74af-42ba-43ff-9e25-c250e86bc18d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.981664] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1060.981664] env[62383]: value = "task-2452296" [ 1060.981664] env[62383]: _type = "Task" [ 1060.981664] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.989984] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452296, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.324476] env[62383]: DEBUG oslo_vmware.api [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452293, 'name': PowerOnVM_Task, 'duration_secs': 0.788131} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.324926] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1061.325175] env[62383]: DEBUG nova.compute.manager [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1061.326080] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82386fc-f2dd-4618-ab82-28cf594e46ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.404709] env[62383]: DEBUG nova.network.neutron [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Updated VIF entry in instance network info cache for port 3e70fa62-b81f-4cf7-950b-772addf79f9c. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1061.405509] env[62383]: DEBUG nova.network.neutron [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Updating instance_info_cache with network_info: [{"id": "3e70fa62-b81f-4cf7-950b-772addf79f9c", "address": "fa:16:3e:cc:4d:0a", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e70fa62-b8", "ovs_interfaceid": "3e70fa62-b81f-4cf7-950b-772addf79f9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.420097] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452294, 'name': ReconfigVM_Task, 'duration_secs': 0.579646} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.421683] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 981aa014-4861-4ab3-94e3-c113eec9bf29/981aa014-4861-4ab3-94e3-c113eec9bf29.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1061.426119] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e54193c-ac60-477f-89de-8a52bd0fdc3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.428433] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452295, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.437760] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1061.437760] env[62383]: value = "task-2452297" [ 1061.437760] env[62383]: _type = "Task" [ 1061.437760] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.449444] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452297, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.451489] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1501fae-e91e-41bc-85a6-c663a92df451 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.459064] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ddee38-0b2a-434e-9139-419e23f683e2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.498938] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0285257-7697-4494-881b-74c55f3c6669 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.510869] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452296, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.512063] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8ea780-4328-454f-addb-d2612f623120 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.530253] env[62383]: DEBUG nova.compute.provider_tree [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.849527] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.916500] env[62383]: DEBUG oslo_concurrency.lockutils [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] Releasing lock "refresh_cache-de24aca8-30fc-453e-b192-b6bb115876ef" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.916768] env[62383]: DEBUG nova.compute.manager [req-91a38904-3189-4903-a69f-e2f81e99dddb req-483779ac-78a4-4840-8ce9-966aa11b64d3 service nova] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Received event network-vif-deleted-b55c61d2-6116-422c-9da3-9f29d3174451 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1061.917101] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452295, 'name': CreateVM_Task, 'duration_secs': 1.412064} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.917256] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1061.917878] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.918469] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.918469] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1061.918597] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceb8afa9-4e27-4eea-8c4b-c5f93750765b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.923371] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1061.923371] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52233bd5-b010-8df3-11f9-5a1a3813c82b" [ 1061.923371] env[62383]: _type = "Task" [ 1061.923371] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.930765] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52233bd5-b010-8df3-11f9-5a1a3813c82b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.946299] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452297, 'name': Rename_Task, 'duration_secs': 0.261922} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.946546] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1061.946777] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56917492-5c83-425c-bc33-4830d5aa7094 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.952646] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1061.952646] env[62383]: value = "task-2452298" [ 1061.952646] env[62383]: _type = "Task" [ 1061.952646] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.961315] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452298, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.003201] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452296, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.601501} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.003466] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 0392d059-57ea-49fb-84d2-b71cbca840db/0392d059-57ea-49fb-84d2-b71cbca840db.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1062.003680] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1062.003942] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7863c78c-15c9-4231-91f8-169d6b6f350b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.010657] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1062.010657] env[62383]: value = "task-2452299" [ 1062.010657] env[62383]: _type = "Task" [ 1062.010657] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.018360] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452299, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.033906] env[62383]: DEBUG nova.scheduler.client.report [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1062.433418] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52233bd5-b010-8df3-11f9-5a1a3813c82b, 'name': SearchDatastore_Task, 'duration_secs': 0.011937} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.433759] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.434034] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1062.434288] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.434452] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.434661] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 
tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1062.434929] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f07da003-9e7d-4f6b-968f-f05e862f804f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.444418] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1062.444680] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1062.445550] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3403219-288b-4abd-b439-bb59afc602e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.451811] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1062.451811] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f87128-124e-e953-f86a-c9a47edf89a5" [ 1062.451811] env[62383]: _type = "Task" [ 1062.451811] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.464596] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f87128-124e-e953-f86a-c9a47edf89a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.468358] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452298, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.520331] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452299, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076082} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.520684] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1062.521571] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6886d276-9f5d-4507-a20c-71a657972f72 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.543780] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 0392d059-57ea-49fb-84d2-b71cbca840db/0392d059-57ea-49fb-84d2-b71cbca840db.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1062.544571] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.843s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.546564] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a92e60d4-e3b6-4192-bf12-b8ea23188e55 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.561791] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.713s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.562015] env[62383]: DEBUG nova.objects.instance [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1062.569953] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1062.569953] env[62383]: value = "task-2452300" [ 1062.569953] env[62383]: _type = "Task" [ 1062.569953] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.577927] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452300, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.585559] env[62383]: INFO nova.scheduler.client.report [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Deleted allocations for instance 690dca62-cafb-40f7-92f0-9bbfde3467b6 [ 1062.971091] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f87128-124e-e953-f86a-c9a47edf89a5, 'name': SearchDatastore_Task, 'duration_secs': 0.011352} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.975097] env[62383]: DEBUG oslo_vmware.api [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452298, 'name': PowerOnVM_Task, 'duration_secs': 0.887642} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.975520] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd66fdce-a429-4468-9d8e-96b432dc7147 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.978065] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1062.978450] env[62383]: INFO nova.compute.manager [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Took 9.07 seconds to spawn the instance on the hypervisor. [ 1062.978764] env[62383]: DEBUG nova.compute.manager [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1062.979626] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f8a22c-9d43-4c78-9fc8-37cc7591feba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.985953] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1062.985953] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52867faf-0f2c-14a9-7c12-d5a928f27676" [ 1062.985953] env[62383]: _type = "Task" [ 1062.985953] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.999965] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52867faf-0f2c-14a9-7c12-d5a928f27676, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.080497] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452300, 'name': ReconfigVM_Task, 'duration_secs': 0.467364} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.080675] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 0392d059-57ea-49fb-84d2-b71cbca840db/0392d059-57ea-49fb-84d2-b71cbca840db.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1063.081327] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67111cf5-7248-49f5-8e44-bb2465437c06 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.087859] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1063.087859] env[62383]: value = "task-2452301" [ 1063.087859] env[62383]: _type = "Task" [ 1063.087859] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.093636] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2862c51c-e59f-4fef-bc5a-e1c08bd904da tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "690dca62-cafb-40f7-92f0-9bbfde3467b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.990s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.098441] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452301, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.505046] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52867faf-0f2c-14a9-7c12-d5a928f27676, 'name': SearchDatastore_Task, 'duration_secs': 0.017567} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.506693] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.506972] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] de24aca8-30fc-453e-b192-b6bb115876ef/de24aca8-30fc-453e-b192-b6bb115876ef.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1063.507496] env[62383]: INFO nova.compute.manager [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Took 20.69 seconds to build instance. [ 1063.508455] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13561347-df4b-413f-8d0e-8abb66ebf6b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.517277] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1063.517277] env[62383]: value = "task-2452302" [ 1063.517277] env[62383]: _type = "Task" [ 1063.517277] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.529304] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452302, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.571058] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2a7b4938-eb30-41f6-b48f-dcd7388df91e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.599972] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452301, 'name': Rename_Task, 'duration_secs': 0.192844} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.599972] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1063.600321] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-880864f3-7a3b-4191-b195-7098d0357392 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.607576] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1063.607576] env[62383]: value = "task-2452303" [ 1063.607576] env[62383]: _type = "Task" [ 1063.607576] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.618556] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452303, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.869120] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "8f639983-e7ef-4a63-94b6-5c5256015937" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.869446] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "8f639983-e7ef-4a63-94b6-5c5256015937" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.869602] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "8f639983-e7ef-4a63-94b6-5c5256015937-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.869785] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "8f639983-e7ef-4a63-94b6-5c5256015937-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.869957] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b 
tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "8f639983-e7ef-4a63-94b6-5c5256015937-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.872351] env[62383]: INFO nova.compute.manager [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Terminating instance [ 1063.890105] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52af24bd-25eb-6010-fc0a-1beca640061b/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1063.891339] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ed4c16-b4c9-465d-9d43-ae884c369a4c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.899321] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52af24bd-25eb-6010-fc0a-1beca640061b/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1063.899550] env[62383]: ERROR oslo_vmware.rw_handles [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52af24bd-25eb-6010-fc0a-1beca640061b/disk-0.vmdk due to incomplete transfer. [ 1063.899813] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a3027d6a-8631-4fed-97d4-dc936a4d915c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.908172] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52af24bd-25eb-6010-fc0a-1beca640061b/disk-0.vmdk. 
{{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1063.908277] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Uploaded image 668a5589-791f-4966-838e-a17995d2fb51 to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1063.911300] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1063.912184] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4556827e-593d-4f12-a2e8-f6b414e31708 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.922707] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1063.922707] env[62383]: value = "task-2452304" [ 1063.922707] env[62383]: _type = "Task" [ 1063.922707] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.936837] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452304, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.013564] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e5beee9f-0757-4529-b740-2443e6b4b793 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.206s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.030782] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452302, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.118543] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452303, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.148347] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "c56464dd-63af-4686-b666-d0ac2df01ec1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.148697] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.148865] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.149071] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.149275] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.151888] env[62383]: INFO nova.compute.manager [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Terminating instance [ 1064.377675] env[62383]: DEBUG nova.compute.manager [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1064.378016] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.379185] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c313d4b-74d3-4698-8d68-62b4cae262df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.388067] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.388419] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f641f4c-df69-4829-b163-9d36de9bd4f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.395189] env[62383]: DEBUG oslo_vmware.api [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1064.395189] env[62383]: value = "task-2452305" [ 1064.395189] env[62383]: _type = "Task" [ 1064.395189] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.403631] env[62383]: DEBUG oslo_vmware.api [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452305, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.437026] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452304, 'name': Destroy_Task} progress is 33%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.529032] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452302, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.620232} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.529032] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] de24aca8-30fc-453e-b192-b6bb115876ef/de24aca8-30fc-453e-b192-b6bb115876ef.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1064.529496] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1064.529496] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d70a0c59-1994-40fd-9773-7a98d6458044 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.535868] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1064.535868] env[62383]: value = "task-2452306" [ 1064.535868] env[62383]: _type = "Task" [ 1064.535868] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.544467] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452306, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.617314] env[62383]: DEBUG oslo_vmware.api [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452303, 'name': PowerOnVM_Task, 'duration_secs': 0.905717} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.617590] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1064.617979] env[62383]: INFO nova.compute.manager [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Took 8.42 seconds to spawn the instance on the hypervisor. 
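Aside (not part of the captured log): the recurring "Invoking VirtualMachine.PowerOnVM_Task with opID=...", "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%." and "... completed successfully." records above come from oslo.vmware's invoke-then-poll task handling. The sketch below illustrates that pattern through oslo.vmware's public VMwareAPISession.invoke_api() / wait_for_task() calls; it is a minimal illustration under stated assumptions, not Nova's actual code path (Nova goes through thin wrappers in nova.virt.vmwareapi.vm_util), the connection values are placeholders, and the constructor arguments shown (host, username, password, api_retry_count, task_poll_interval) should be treated as an assumption rather than a copy of Nova's call.

```python
# Minimal sketch of the oslo.vmware invoke-then-poll pattern seen in the log.
# Not taken from this log or from Nova's source; all concrete values are
# placeholders and session/vm_ref are assumed to be supplied by the caller.
from oslo_vmware import api as vmware_api


def make_session(host, user, password):
    """Build a vCenter API session (placeholder credentials).

    api_retry_count controls retries on transient faults; task_poll_interval
    sets the cadence of the "progress is N%" polling records in the log.
    """
    return vmware_api.VMwareAPISession(
        host, user, password,
        api_retry_count=10,
        task_poll_interval=0.5)


def power_on_vm(session, vm_ref):
    """Power on a VM and block until the vCenter task finishes.

    ``session`` is an oslo_vmware.api.VMwareAPISession; ``vm_ref`` is the
    VirtualMachine managed-object reference, obtained elsewhere (e.g. via
    the PropertyCollector queries visible throughout the log).
    """
    # Logged by oslo_vmware.service as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-...".
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() re-reads the task state on the session's
    # task_poll_interval; each poll is logged as
    # "Task: {'id': ..., 'name': PowerOnVM_Task} progress is N%." and the
    # final poll logs "... completed successfully." before the task info is
    # returned (an error state raises instead).
    return session.wait_for_task(task)
```

In a real deployment the session would be built once per driver from Nova's VMware driver configuration (the values behind the "Logging into host" and session-creation records near the start of this log) and then shared by every task invocation shown above: PowerOnVM_Task, CopyVirtualDisk_Task, ReconfigVM_Task, and the rest.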
[ 1064.617979] env[62383]: DEBUG nova.compute.manager [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1064.618864] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c37089d-dc1d-43e6-8ca0-42669df16d39 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.656050] env[62383]: DEBUG nova.compute.manager [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1064.656359] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.657247] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5363a2-b145-4647-942b-3057f66933fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.665677] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.665913] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-551fb0aa-8539-407a-bc32-e3c46d7766e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.671037] env[62383]: DEBUG oslo_vmware.api [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1064.671037] env[62383]: value = "task-2452307" [ 1064.671037] env[62383]: _type = "Task" [ 1064.671037] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.678473] env[62383]: DEBUG oslo_vmware.api [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452307, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.801859] env[62383]: DEBUG oslo_concurrency.lockutils [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "981aa014-4861-4ab3-94e3-c113eec9bf29" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.802088] env[62383]: DEBUG oslo_concurrency.lockutils [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.802275] env[62383]: INFO nova.compute.manager [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Shelving [ 1064.906705] env[62383]: DEBUG oslo_vmware.api [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452305, 'name': PowerOffVM_Task, 'duration_secs': 0.225905} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.907060] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.907278] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.907526] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a78fa4a-59d4-410d-a11c-331f7f9dd390 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.931713] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452304, 'name': Destroy_Task, 'duration_secs': 0.668633} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.932031] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Destroyed the VM [ 1064.932319] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1064.932645] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-973305c2-70c9-4424-bb91-34b5b333e3a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.939279] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1064.939279] env[62383]: value = "task-2452309" [ 1064.939279] env[62383]: _type = "Task" [ 1064.939279] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.946942] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452309, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.972826] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1064.973067] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1064.973258] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Deleting the datastore file [datastore2] 8f639983-e7ef-4a63-94b6-5c5256015937 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.973609] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53adad8d-86ab-4dbf-84f4-e92ed2912b26 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.980051] env[62383]: DEBUG oslo_vmware.api [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1064.980051] env[62383]: value = "task-2452310" [ 1064.980051] env[62383]: _type = "Task" [ 1064.980051] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.987648] env[62383]: DEBUG oslo_vmware.api [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452310, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.046330] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452306, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074003} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.046677] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1065.047597] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e336b326-63bc-45b2-befe-d4bb6d498d6a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.071919] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] de24aca8-30fc-453e-b192-b6bb115876ef/de24aca8-30fc-453e-b192-b6bb115876ef.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.072993] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74bcfccd-9d73-4258-910e-bdd38d9d0543 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.092971] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1065.092971] env[62383]: value = "task-2452311" [ 1065.092971] env[62383]: _type = "Task" [ 1065.092971] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.100815] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452311, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.135180] env[62383]: INFO nova.compute.manager [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Took 20.19 seconds to build instance. [ 1065.179855] env[62383]: DEBUG oslo_vmware.api [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452307, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.451380] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452309, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.489597] env[62383]: DEBUG oslo_vmware.api [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152293} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.489860] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.490057] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.490262] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.490462] env[62383]: INFO nova.compute.manager [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1065.490717] env[62383]: DEBUG oslo.service.loopingcall [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1065.490915] env[62383]: DEBUG nova.compute.manager [-] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1065.491023] env[62383]: DEBUG nova.network.neutron [-] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.603842] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452311, 'name': ReconfigVM_Task, 'duration_secs': 0.255282} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.604287] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Reconfigured VM instance instance-0000006b to attach disk [datastore2] de24aca8-30fc-453e-b192-b6bb115876ef/de24aca8-30fc-453e-b192-b6bb115876ef.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.605403] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-234e4f75-4857-47f5-a79c-5e41814c91c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.613202] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1065.613202] env[62383]: value = "task-2452312" [ 1065.613202] env[62383]: _type = "Task" [ 1065.613202] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.621399] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452312, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.637014] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d52256b5-492d-4764-8e26-2f579d0d5ed4 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "0392d059-57ea-49fb-84d2-b71cbca840db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.703s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.681660] env[62383]: DEBUG oslo_vmware.api [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452307, 'name': PowerOffVM_Task, 'duration_secs': 0.508439} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.681951] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1065.682182] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1065.682479] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10b3a324-d88a-4452-98bd-9772d1cd7d71 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.745968] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1065.746368] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1065.746602] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleting the datastore file [datastore2] c56464dd-63af-4686-b666-d0ac2df01ec1 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1065.746959] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b053da49-23bb-4873-a573-566aa542251b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.756256] env[62383]: DEBUG oslo_vmware.api [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for the task: (returnval){ [ 1065.756256] env[62383]: value = "task-2452314" [ 1065.756256] env[62383]: _type = "Task" [ 1065.756256] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.764904] env[62383]: DEBUG oslo_vmware.api [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452314, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.811773] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1065.812141] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-276c7e16-28f6-45b0-bcef-b02fdccd6e69 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.819264] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1065.819264] env[62383]: value = "task-2452315" [ 1065.819264] env[62383]: _type = "Task" [ 1065.819264] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.827754] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452315, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.930389] env[62383]: DEBUG nova.compute.manager [req-220f886d-f811-47cf-a623-e083943dd376 req-987cb162-42fa-4bf6-acab-d4a3fa0f120d service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Received event network-vif-deleted-59f7a7fd-f1a7-4f6e-a37b-bc4baf716887 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1065.930389] env[62383]: INFO nova.compute.manager [req-220f886d-f811-47cf-a623-e083943dd376 req-987cb162-42fa-4bf6-acab-d4a3fa0f120d service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Neutron deleted interface 59f7a7fd-f1a7-4f6e-a37b-bc4baf716887; detaching it from the instance and deleting it from the info cache [ 1065.930389] env[62383]: DEBUG nova.network.neutron [req-220f886d-f811-47cf-a623-e083943dd376 req-987cb162-42fa-4bf6-acab-d4a3fa0f120d service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.950621] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452309, 'name': RemoveSnapshot_Task, 'duration_secs': 0.654397} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.950996] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1065.951471] env[62383]: DEBUG nova.compute.manager [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1065.954016] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2d0bb6-9246-46b0-981f-29d7e6b47155 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.127014] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452312, 'name': Rename_Task, 'duration_secs': 0.246271} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.127014] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1066.127014] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd2b49c7-3958-44e0-ad40-32a6a5859137 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.135496] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1066.135496] env[62383]: value = "task-2452316" [ 1066.135496] env[62383]: _type = "Task" [ 1066.135496] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.142361] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452316, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.269152] env[62383]: DEBUG oslo_vmware.api [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Task: {'id': task-2452314, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154063} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.269152] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1066.269152] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1066.269152] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1066.269152] env[62383]: INFO nova.compute.manager [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Took 1.61 seconds to destroy the instance on the hypervisor. [ 1066.269152] env[62383]: DEBUG oslo.service.loopingcall [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1066.269152] env[62383]: DEBUG nova.compute.manager [-] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1066.269152] env[62383]: DEBUG nova.network.neutron [-] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1066.329388] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452315, 'name': PowerOffVM_Task, 'duration_secs': 0.26348} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.329641] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1066.330489] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545b3a94-4fce-4ccc-a735-a925a15cab8f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.349645] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23831d8-405a-4fb4-9218-74a22df648d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.398714] env[62383]: DEBUG nova.network.neutron [-] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.432625] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04c67bd1-022a-4a87-87c4-15177b3999c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.443277] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd02c8c-d720-4f3b-b5ef-0dd9b957b19e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.464893] env[62383]: INFO nova.compute.manager [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Shelve offloading [ 1066.490344] env[62383]: DEBUG nova.compute.manager [req-220f886d-f811-47cf-a623-e083943dd376 req-987cb162-42fa-4bf6-acab-d4a3fa0f120d service nova] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Detach interface failed, port_id=59f7a7fd-f1a7-4f6e-a37b-bc4baf716887, reason: Instance 8f639983-e7ef-4a63-94b6-5c5256015937 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1066.644067] env[62383]: DEBUG oslo_vmware.api [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452316, 'name': PowerOnVM_Task, 'duration_secs': 0.486629} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.644364] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1066.644599] env[62383]: INFO nova.compute.manager [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Took 8.08 seconds to spawn the instance on the hypervisor. [ 1066.644784] env[62383]: DEBUG nova.compute.manager [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.645599] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa8aa7e-9dd9-4919-b3a0-907a3ace5c3a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.860724] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1066.862179] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1965f253-4257-4d0a-9404-80768e1fc054 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.872636] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1066.872636] env[62383]: value = "task-2452317" [ 1066.872636] env[62383]: _type = "Task" [ 1066.872636] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.883485] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452317, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.901393] env[62383]: INFO nova.compute.manager [-] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Took 1.41 seconds to deallocate network for instance. 
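The repeated "Waiting for the task: (returnval){ value = "task-..." } to complete" / "progress is N%" entries above are oslo.vmware's task polling: the driver kicks off an asynchronous vCenter method (PowerOffVM_Task, CreateSnapshot_Task, DeleteDatastoreFile_Task, PowerOnVM_Task, ...) and then blocks in wait_for_task(), which re-reads the task state every task_poll_interval seconds until it reports success or error. A minimal standalone sketch of that pattern follows; the endpoint, credentials and managed-object ID are placeholders and none of them come from this log, nor is this Nova's own code.

    from oslo_vmware import api, vim_util

    # Placeholder vCenter endpoint and credentials; task_poll_interval is the
    # delay between the "progress is N%" polls seen in the log above.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference for some VM (placeholder moref value),
    # start an asynchronous power-off, and block until the task finishes.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)  # polls the task; raises if the task errors out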
[ 1066.972014] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.972354] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ee0b9b1-f718-4810-8294-15947cf9ad9b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.982033] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1066.982033] env[62383]: value = "task-2452318" [ 1066.982033] env[62383]: _type = "Task" [ 1066.982033] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.990909] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.097584] env[62383]: DEBUG nova.network.neutron [-] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.164336] env[62383]: INFO nova.compute.manager [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Took 20.41 seconds to build instance. [ 1067.383544] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452317, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.409880] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.410093] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.410701] env[62383]: DEBUG nova.objects.instance [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lazy-loading 'resources' on Instance uuid 8f639983-e7ef-4a63-94b6-5c5256015937 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.493977] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1067.494279] env[62383]: DEBUG nova.compute.manager [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.495114] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a13f6c-2d0c-47cf-8f25-586982a0890f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.504319] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.504480] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.504700] env[62383]: DEBUG nova.network.neutron [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1067.600883] env[62383]: INFO nova.compute.manager [-] [instance: 
c56464dd-63af-4686-b666-d0ac2df01ec1] Took 1.33 seconds to deallocate network for instance. [ 1067.666385] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7245b48f-0ed5-4a4f-adcf-283f8dd8ae98 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.917s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.885061] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452317, 'name': CreateSnapshot_Task, 'duration_secs': 0.885529} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.885371] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1067.886143] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79d89d7-c37e-442e-bb36-f4cb31283253 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.963644] env[62383]: DEBUG nova.compute.manager [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received event network-changed-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1067.963852] env[62383]: DEBUG nova.compute.manager [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Refreshing instance network info cache due to event network-changed-e822f89d-516c-4eab-bd54-f1369994f514. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1067.964605] env[62383]: DEBUG oslo_concurrency.lockutils [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] Acquiring lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1067.964605] env[62383]: DEBUG oslo_concurrency.lockutils [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] Acquired lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.964605] env[62383]: DEBUG nova.network.neutron [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Refreshing network info cache for port e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1068.096444] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552d019b-670c-44f0-809b-e6487437bb11 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.107323] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.107862] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b66ee0-584e-4739-9807-f7e75212ea5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.142891] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a438b3-bb10-4b4b-ac92-ea1363f96ef0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.151555] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b731fb65-967e-4188-ac9f-9691b11d880f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.166816] env[62383]: DEBUG nova.compute.provider_tree [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.234015] env[62383]: DEBUG nova.network.neutron [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating instance_info_cache with network_info: [{"id": "275a086a-5096-4414-8397-af9ac5331f87", "address": "fa:16:3e:2b:a3:17", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap275a086a-50", "ovs_interfaceid": "275a086a-5096-4414-8397-af9ac5331f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.410380] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1068.410708] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9ecf43b6-178e-45be-8399-20ad85e1d945 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.421072] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1068.421072] env[62383]: value = "task-2452319" [ 1068.421072] env[62383]: _type = "Task" [ 1068.421072] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.432988] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452319, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.669900] env[62383]: DEBUG nova.scheduler.client.report [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1068.694076] env[62383]: DEBUG nova.network.neutron [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updated VIF entry in instance network info cache for port e822f89d-516c-4eab-bd54-f1369994f514. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1068.694471] env[62383]: DEBUG nova.network.neutron [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating instance_info_cache with network_info: [{"id": "e822f89d-516c-4eab-bd54-f1369994f514", "address": "fa:16:3e:70:0a:80", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape822f89d-51", "ovs_interfaceid": "e822f89d-516c-4eab-bd54-f1369994f514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.736570] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1068.932326] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452319, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.128090] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1069.129103] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79179a4-d74c-41f8-ba6a-4163a6855f52 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.137788] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1069.138104] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d34ab201-f4d2-4038-a96d-2ba170a8e63a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.175306] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.765s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.179361] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.072s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.179846] env[62383]: DEBUG nova.objects.instance [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lazy-loading 'resources' on Instance uuid c56464dd-63af-4686-b666-d0ac2df01ec1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.197703] env[62383]: DEBUG oslo_concurrency.lockutils [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] Releasing lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1069.198159] env[62383]: DEBUG nova.compute.manager [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Received event network-vif-deleted-0afca8d2-b019-4a25-af28-7061dbf32e28 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1069.198558] env[62383]: DEBUG nova.compute.manager [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Received event network-changed-3e70fa62-b81f-4cf7-950b-772addf79f9c {{(pid=62383) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1069.198793] env[62383]: DEBUG nova.compute.manager [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Refreshing instance network info cache due to event network-changed-3e70fa62-b81f-4cf7-950b-772addf79f9c. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1069.199115] env[62383]: DEBUG oslo_concurrency.lockutils [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] Acquiring lock "refresh_cache-de24aca8-30fc-453e-b192-b6bb115876ef" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.199365] env[62383]: DEBUG oslo_concurrency.lockutils [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] Acquired lock "refresh_cache-de24aca8-30fc-453e-b192-b6bb115876ef" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.199671] env[62383]: DEBUG nova.network.neutron [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Refreshing network info cache for port 3e70fa62-b81f-4cf7-950b-772addf79f9c {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1069.202590] env[62383]: INFO nova.scheduler.client.report [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Deleted allocations for instance 8f639983-e7ef-4a63-94b6-5c5256015937 [ 1069.243175] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1069.243175] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1069.243445] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleting the datastore file [datastore2] ec7c648d-10b0-480a-a5f0-4dab08d0049e {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1069.244404] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b61ba06-a511-42b9-97a0-9e7f36a8cb18 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.254415] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1069.254415] env[62383]: value = "task-2452321" [ 1069.254415] env[62383]: _type = "Task" [ 1069.254415] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.264766] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452321, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.432930] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452319, 'name': CloneVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.708924] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2b4a1caf-f57e-45b3-980a-f060e104cc8b tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "8f639983-e7ef-4a63-94b6-5c5256015937" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.839s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.767233] env[62383]: DEBUG oslo_vmware.api [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16716} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.767489] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1069.767676] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1069.767857] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1069.783551] env[62383]: INFO nova.scheduler.client.report [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleted allocations for instance ec7c648d-10b0-480a-a5f0-4dab08d0049e [ 1069.834086] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab0f32b-4685-43a2-bec4-a70b4fe1a78a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.846549] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74767e68-39b9-4691-bcaf-a4a270b84940 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.879245] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69d7378-0871-409a-aba1-0050b5273b67 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.887906] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d189e981-0313-4869-9218-a973f3a2f7cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.901559] env[62383]: DEBUG nova.compute.provider_tree [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.933115] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452319, 'name': CloneVM_Task, 'duration_secs': 1.030251} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.933385] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Created linked-clone VM from snapshot [ 1069.934144] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264828d8-d11a-429f-9278-7721f8f8b55e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.942140] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Uploading image d482abfd-239b-4be9-aa8b-1dc99c06f2de {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1069.944652] env[62383]: DEBUG nova.network.neutron [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Updated VIF entry in instance network info cache for port 3e70fa62-b81f-4cf7-950b-772addf79f9c. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1069.945037] env[62383]: DEBUG nova.network.neutron [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Updating instance_info_cache with network_info: [{"id": "3e70fa62-b81f-4cf7-950b-772addf79f9c", "address": "fa:16:3e:cc:4d:0a", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e70fa62-b8", "ovs_interfaceid": "3e70fa62-b81f-4cf7-950b-772addf79f9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.967450] env[62383]: DEBUG oslo_vmware.rw_handles [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1069.967450] env[62383]: value = "vm-496587" [ 1069.967450] env[62383]: _type = "VirtualMachine" [ 1069.967450] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1069.967934] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-97440241-d7fd-420b-9f97-7f9cd00a15bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.976693] env[62383]: DEBUG oslo_vmware.rw_handles [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lease: (returnval){ [ 1069.976693] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5218ebe1-6842-80b3-b051-e77d728e530f" [ 1069.976693] env[62383]: _type = "HttpNfcLease" [ 1069.976693] env[62383]: } obtained for exporting VM: (result){ [ 1069.976693] env[62383]: value = "vm-496587" [ 1069.976693] env[62383]: _type = "VirtualMachine" [ 1069.976693] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1069.976693] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the lease: (returnval){ [ 1069.976693] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5218ebe1-6842-80b3-b051-e77d728e530f" [ 1069.976693] env[62383]: _type = "HttpNfcLease" [ 1069.976693] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1069.984302] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1069.984302] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5218ebe1-6842-80b3-b051-e77d728e530f" [ 1069.984302] env[62383]: _type = "HttpNfcLease" [ 1069.984302] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1069.991191] env[62383]: DEBUG nova.compute.manager [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received event network-vif-unplugged-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1069.991396] env[62383]: DEBUG oslo_concurrency.lockutils [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] Acquiring lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.991637] env[62383]: DEBUG oslo_concurrency.lockutils [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.991804] env[62383]: DEBUG oslo_concurrency.lockutils [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1069.991971] env[62383]: DEBUG nova.compute.manager [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] No waiting events found dispatching network-vif-unplugged-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1069.992157] env[62383]: WARNING nova.compute.manager [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received unexpected event network-vif-unplugged-275a086a-5096-4414-8397-af9ac5331f87 for instance with vm_state shelved_offloaded and task_state None. 
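The req-655019b1 entries above request an HttpNfcLease for exporting vm-496587 and then poll it from "initializing" toward "ready". As a rough illustration of that polling shape only, not the oslo.vmware implementation, the loop below uses a hypothetical get_lease_state() callable in place of the PropertyCollector reads that appear in the log.

import time

def wait_for_lease_ready(get_lease_state, interval=0.5, timeout=300.0):
    """Poll a lease-state accessor until the lease reports 'ready'.

    get_lease_state is a hypothetical callable returning one of
    'initializing', 'ready' or 'error', mirroring the states logged above.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = get_lease_state()
        if state == 'ready':
            return
        if state == 'error':
            raise RuntimeError('HttpNfcLease entered the error state')
        time.sleep(interval)  # lease still initializing; poll again
    raise TimeoutError('lease not ready within %.0f seconds' % timeout)

Once the lease is ready, its info carries the VMDK URL that the later rw_handles entries open for reading.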
[ 1069.992318] env[62383]: DEBUG nova.compute.manager [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received event network-changed-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1069.992502] env[62383]: DEBUG nova.compute.manager [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Refreshing instance network info cache due to event network-changed-275a086a-5096-4414-8397-af9ac5331f87. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1069.992645] env[62383]: DEBUG oslo_concurrency.lockutils [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] Acquiring lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1069.992863] env[62383]: DEBUG oslo_concurrency.lockutils [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] Acquired lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.993044] env[62383]: DEBUG nova.network.neutron [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Refreshing network info cache for port 275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.287715] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.404908] env[62383]: DEBUG nova.scheduler.client.report [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1070.449133] env[62383]: DEBUG oslo_concurrency.lockutils [req-a342bf8e-a52d-4f60-82be-468365c82081 req-a87ba406-9756-4d89-b672-3c512afa6144 service nova] Releasing lock "refresh_cache-de24aca8-30fc-453e-b192-b6bb115876ef" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.485898] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1070.485898] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5218ebe1-6842-80b3-b051-e77d728e530f" [ 1070.485898] env[62383]: _type = "HttpNfcLease" [ 1070.485898] env[62383]: } is ready. 
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1070.486262] env[62383]: DEBUG oslo_vmware.rw_handles [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1070.486262] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5218ebe1-6842-80b3-b051-e77d728e530f" [ 1070.486262] env[62383]: _type = "HttpNfcLease" [ 1070.486262] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1070.487085] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14b02a2-a0c5-40b6-9591-b1ac8dc363d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.498897] env[62383]: DEBUG oslo_vmware.rw_handles [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528e3716-a130-b4ef-12cf-cf56a0962205/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1070.499150] env[62383]: DEBUG oslo_vmware.rw_handles [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528e3716-a130-b4ef-12cf-cf56a0962205/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1070.613917] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b14a9c29-d39b-4cae-b814-830a57c8f571 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.797532] env[62383]: DEBUG nova.network.neutron [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updated VIF entry in instance network info cache for port 275a086a-5096-4414-8397-af9ac5331f87. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1070.797914] env[62383]: DEBUG nova.network.neutron [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating instance_info_cache with network_info: [{"id": "275a086a-5096-4414-8397-af9ac5331f87", "address": "fa:16:3e:2b:a3:17", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap275a086a-50", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.903650] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "20861554-890b-4ad3-a73f-0c825a79bbf1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.904016] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "20861554-890b-4ad3-a73f-0c825a79bbf1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.904333] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "20861554-890b-4ad3-a73f-0c825a79bbf1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.904620] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "20861554-890b-4ad3-a73f-0c825a79bbf1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.904833] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock 
"20861554-890b-4ad3-a73f-0c825a79bbf1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.906994] env[62383]: INFO nova.compute.manager [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Terminating instance [ 1070.908873] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.730s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.912692] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.625s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.913109] env[62383]: DEBUG nova.objects.instance [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'resources' on Instance uuid ec7c648d-10b0-480a-a5f0-4dab08d0049e {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.931826] env[62383]: INFO nova.scheduler.client.report [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Deleted allocations for instance c56464dd-63af-4686-b666-d0ac2df01ec1 [ 1071.300465] env[62383]: DEBUG oslo_concurrency.lockutils [req-d9ce9f93-30b0-40da-96ba-d032bb282681 req-8b1cb701-8de8-4f4d-a9ef-7b3352f80576 service nova] Releasing lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1071.414689] env[62383]: DEBUG nova.compute.manager [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1071.414932] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1071.415521] env[62383]: DEBUG nova.objects.instance [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'numa_topology' on Instance uuid ec7c648d-10b0-480a-a5f0-4dab08d0049e {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.417166] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c23aa6b-0bef-446f-9b56-f9c6ee8dc529 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.427310] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1071.427645] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a715df30-c986-4e0f-add2-0beee281b4a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.436726] env[62383]: DEBUG oslo_vmware.api [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1071.436726] env[62383]: value = "task-2452323" [ 1071.436726] env[62383]: _type = "Task" [ 1071.436726] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.444309] env[62383]: DEBUG oslo_concurrency.lockutils [None req-78c50008-70bf-48ca-9148-4d759cc74228 tempest-ServersNegativeTestJSON-1999070260 tempest-ServersNegativeTestJSON-1999070260-project-member] Lock "c56464dd-63af-4686-b666-d0ac2df01ec1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.296s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.450529] env[62383]: DEBUG oslo_vmware.api [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452323, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.715324] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.922151] env[62383]: DEBUG nova.objects.base [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1071.956706] env[62383]: DEBUG oslo_vmware.api [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452323, 'name': PowerOffVM_Task, 'duration_secs': 0.200888} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.957151] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1071.957669] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1071.958123] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28947097-2629-4a88-8873-01e0ea201c94 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.034956] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1072.035494] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1072.035494] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Deleting the datastore file [datastore2] 20861554-890b-4ad3-a73f-0c825a79bbf1 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1072.036020] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-9fd5f748-11f0-47ad-9701-67b9b457aaf5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.047845] env[62383]: DEBUG oslo_vmware.api [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for the task: (returnval){ [ 1072.047845] env[62383]: value = "task-2452325" [ 1072.047845] env[62383]: _type = "Task" [ 1072.047845] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.062900] env[62383]: DEBUG oslo_vmware.api [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452325, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.112880] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86f4cec-9e50-4445-9bd0-cbb1b0b5f4f0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.121856] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78dd0db-9f6a-4ebc-886e-7776da2afb58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.163808] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac4e92c-e4d0-4ec5-8fdb-dcabf6ce5af0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.174166] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3238d0c4-effd-4ac6-acd2-eed7407394fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.193079] env[62383]: DEBUG nova.compute.provider_tree [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.559057] env[62383]: DEBUG oslo_vmware.api [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Task: {'id': task-2452325, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195646} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.559360] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1072.559542] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1072.560252] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1072.560252] env[62383]: INFO nova.compute.manager [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1072.560252] env[62383]: DEBUG oslo.service.loopingcall [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1072.560437] env[62383]: DEBUG nova.compute.manager [-] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1072.560593] env[62383]: DEBUG nova.network.neutron [-] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1072.705515] env[62383]: DEBUG nova.scheduler.client.report [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1073.143307] env[62383]: DEBUG nova.compute.manager [req-3a71b07d-aa76-45a5-b2f7-768eb28d096f req-0f11ce2c-692f-4755-80b7-6731e5282fb4 service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Received event network-vif-deleted-c23a7aba-1afc-4edb-b3ee-d6e718b18392 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1073.143587] env[62383]: INFO nova.compute.manager [req-3a71b07d-aa76-45a5-b2f7-768eb28d096f req-0f11ce2c-692f-4755-80b7-6731e5282fb4 service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Neutron deleted interface c23a7aba-1afc-4edb-b3ee-d6e718b18392; detaching it from the instance and deleting it from the info cache [ 1073.144071] env[62383]: DEBUG nova.network.neutron [req-3a71b07d-aa76-45a5-b2f7-768eb28d096f req-0f11ce2c-692f-4755-80b7-6731e5282fb4 service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.216032] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.303s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.628490] env[62383]: DEBUG nova.network.neutron [-] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.646656] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-386f3820-cef3-4f10-8452-b3114d3e83aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.658063] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4887daa-0270-44f3-ac54-31e017664226 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.691246] env[62383]: DEBUG nova.compute.manager [req-3a71b07d-aa76-45a5-b2f7-768eb28d096f req-0f11ce2c-692f-4755-80b7-6731e5282fb4 service nova] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Detach interface failed, port_id=c23a7aba-1afc-4edb-b3ee-d6e718b18392, reason: Instance 20861554-890b-4ad3-a73f-0c825a79bbf1 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1073.724132] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6b779b23-e42f-43d8-9c7f-68c3390c9532 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.250s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.724970] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.010s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.725169] env[62383]: INFO nova.compute.manager [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Unshelving [ 1074.131479] env[62383]: INFO nova.compute.manager [-] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Took 1.57 seconds to deallocate network for instance. 
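Several entries in this stretch show oslo.concurrency's lock bookkeeping around the resource tracker: "Acquiring lock "compute_resources"", "acquired ... waited 0.625s", and later ""released" ... held 2.303s". A minimal sketch of that pattern, assuming only that oslo.concurrency is installed and without reproducing Nova's actual ResourceTracker code:

import time

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs with the named internal lock held; concurrent callers queue here,
    # which is what produces the non-zero "waited" durations in the log.
    time.sleep(0.1)  # stand-in for inventory / allocation bookkeeping

if __name__ == '__main__':
    start = time.monotonic()
    update_usage()
    print('critical section held for %.3fs' % (time.monotonic() - start))

The decorator routes the call through the same lockutils inner() wrapper the log cites (lockutils.py:402/407/421), which is where the "Acquiring", "acquired ... waited" and ""released" ... held" DEBUG messages are emitted.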
[ 1074.638909] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.639202] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.639391] env[62383]: DEBUG nova.objects.instance [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lazy-loading 'resources' on Instance uuid 20861554-890b-4ad3-a73f-0c825a79bbf1 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1074.737226] env[62383]: DEBUG nova.compute.utils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1075.239972] env[62383]: INFO nova.virt.block_device [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Booting with volume d01d3eba-40b0-4856-9209-d6e56c81b4a7 at /dev/sdb [ 1075.277394] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3ddee30-ecdd-4d9e-b198-ec6af29998ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.290319] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a69ef30-3872-435a-87a3-430599b6f2c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.305019] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be56b0ac-55b5-4fce-af42-97b9db11f9d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.314049] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf7c81b-4567-439f-9cc2-3b26caa5cc43 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.356277] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-85f4d5d2-ea5d-42f2-8e7c-3909b9512436 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.358984] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6129c78-376b-4c0f-9712-78b40c1dd73e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.369084] env[62383]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2769668-b1ba-43ea-bad4-43c5eb2bb2af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.375851] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da871b8-08c4-4f61-acec-c29c0cd150e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.398684] env[62383]: DEBUG nova.compute.provider_tree [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.411361] env[62383]: DEBUG nova.scheduler.client.report [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.415166] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082de81b-8e17-4878-a869-04623abe303d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.424062] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c97c89-750d-4eba-a238-ca6a439d4bb7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.442188] env[62383]: DEBUG nova.virt.block_device [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating existing volume attachment record: 16668e81-9451-4649-b896-c3d3605b070f {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1075.920465] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.281s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.940420] env[62383]: INFO nova.scheduler.client.report [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Deleted allocations for instance 20861554-890b-4ad3-a73f-0c825a79bbf1 [ 1076.448890] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e9e912d0-db99-4d38-b2db-48f8fe934fcf tempest-ListServerFiltersTestJSON-374104930 tempest-ListServerFiltersTestJSON-374104930-project-member] Lock 
"20861554-890b-4ad3-a73f-0c825a79bbf1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.545s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.925959] env[62383]: DEBUG oslo_concurrency.lockutils [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.926274] env[62383]: DEBUG oslo_concurrency.lockutils [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.926398] env[62383]: DEBUG oslo_concurrency.lockutils [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.926587] env[62383]: DEBUG oslo_concurrency.lockutils [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.926762] env[62383]: DEBUG oslo_concurrency.lockutils [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.929508] env[62383]: INFO nova.compute.manager [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Terminating instance [ 1077.435333] env[62383]: DEBUG nova.compute.manager [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1077.435586] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1077.436549] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e91e1c-f1ad-4cfd-987e-3cc71e107257 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.445669] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1077.445956] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a5bef15-af5d-4b8b-9fcc-9674e8ed0e10 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.454712] env[62383]: DEBUG oslo_vmware.api [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1077.454712] env[62383]: value = "task-2452327" [ 1077.454712] env[62383]: _type = "Task" [ 1077.454712] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.464662] env[62383]: DEBUG oslo_vmware.api [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452327, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.966443] env[62383]: DEBUG oslo_vmware.api [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452327, 'name': PowerOffVM_Task, 'duration_secs': 0.248347} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.966859] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.966956] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1077.967204] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9602cea3-c6bc-4ab5-8917-15c9a196b136 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.039565] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1078.039950] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1078.040149] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleting the datastore file [datastore2] 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1078.040480] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1dd6340-c915-46f2-b3b5-7d4a94e4ffeb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.050397] env[62383]: DEBUG oslo_vmware.api [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1078.050397] env[62383]: value = "task-2452331" [ 1078.050397] env[62383]: _type = "Task" [ 1078.050397] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.062647] env[62383]: DEBUG oslo_vmware.api [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452331, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.433854] env[62383]: DEBUG oslo_vmware.rw_handles [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528e3716-a130-b4ef-12cf-cf56a0962205/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1078.434916] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783ab304-89a1-4a5b-b44c-6d5083ad3035 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.445100] env[62383]: DEBUG oslo_vmware.rw_handles [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528e3716-a130-b4ef-12cf-cf56a0962205/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1078.445281] env[62383]: ERROR oslo_vmware.rw_handles [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528e3716-a130-b4ef-12cf-cf56a0962205/disk-0.vmdk due to incomplete transfer. [ 1078.445678] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b667abb4-0e92-43ab-a7ed-473caa8a3b6c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.453825] env[62383]: DEBUG oslo_vmware.rw_handles [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528e3716-a130-b4ef-12cf-cf56a0962205/disk-0.vmdk. 
{{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1078.454386] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Uploaded image d482abfd-239b-4be9-aa8b-1dc99c06f2de to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1078.456461] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1078.456573] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-27effca8-c3e1-4b98-8189-e46fda1e004c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.468460] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1078.468460] env[62383]: value = "task-2452332" [ 1078.468460] env[62383]: _type = "Task" [ 1078.468460] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.477486] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452332, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.562932] env[62383]: DEBUG oslo_vmware.api [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452331, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167773} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.564012] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1078.564012] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1078.565049] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1078.565049] env[62383]: INFO nova.compute.manager [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1078.565049] env[62383]: DEBUG oslo.service.loopingcall [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1078.565049] env[62383]: DEBUG nova.compute.manager [-] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1078.565049] env[62383]: DEBUG nova.network.neutron [-] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1078.979368] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452332, 'name': Destroy_Task, 'duration_secs': 0.394489} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.979704] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Destroyed the VM [ 1078.979869] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1078.980140] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6f86d39d-71a9-4eb9-b79a-bcefdc8863ac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.988830] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1078.988830] env[62383]: value = "task-2452333" [ 1078.988830] env[62383]: _type = "Task" [ 1078.988830] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.997818] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452333, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.104252] env[62383]: DEBUG nova.compute.manager [req-6f3d6106-42b1-4ee8-9a22-16498158cd06 req-52d55ecb-972e-448c-8167-1449b2440252 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Received event network-vif-deleted-61bdafb3-8c09-454a-af63-5aaacc52947b {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1079.104643] env[62383]: INFO nova.compute.manager [req-6f3d6106-42b1-4ee8-9a22-16498158cd06 req-52d55ecb-972e-448c-8167-1449b2440252 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Neutron deleted interface 61bdafb3-8c09-454a-af63-5aaacc52947b; detaching it from the instance and deleting it from the info cache [ 1079.104895] env[62383]: DEBUG nova.network.neutron [req-6f3d6106-42b1-4ee8-9a22-16498158cd06 req-52d55ecb-972e-448c-8167-1449b2440252 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.500866] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452333, 'name': RemoveSnapshot_Task, 'duration_secs': 0.387974} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.501093] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1079.501244] env[62383]: DEBUG nova.compute.manager [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1079.502081] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88212e6-07ff-4313-9c78-41df6616312c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.577790] env[62383]: DEBUG nova.network.neutron [-] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.607572] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a9a25b2-2bfd-4b14-89a0-7e2751f38a0c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.619093] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef43525-dc31-4c05-a18b-726f5ffb34ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.656381] env[62383]: DEBUG nova.compute.manager [req-6f3d6106-42b1-4ee8-9a22-16498158cd06 req-52d55ecb-972e-448c-8167-1449b2440252 service nova] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Detach interface failed, port_id=61bdafb3-8c09-454a-af63-5aaacc52947b, reason: Instance 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1080.017481] env[62383]: INFO nova.compute.manager [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Shelve offloading [ 1080.082101] env[62383]: INFO nova.compute.manager [-] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Took 1.52 seconds to deallocate network for instance. 
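The RemoveSnapshot_Task sequence above ("Invoking ... with opID", "Waiting for the task", "progress is 0%", "completed successfully") is the standard oslo.vmware invoke-and-wait cycle. A minimal sketch of that cycle follows; it assumes an existing oslo_vmware.api.VMwareAPISession called `session` and a snapshot managed-object reference `snapshot_ref`, both placeholders rather than values taken from this log, and the method names are given from memory of oslo.vmware's public API rather than from this trace.

    # Sketch only: the invoke-and-wait pattern behind the
    # "Invoking ..._Task" / "Waiting for the task" / "completed successfully"
    # entries above. `session` and `snapshot_ref` are assumed placeholders.
    def delete_snapshot(session, snapshot_ref):
        # invoke_api() issues the SOAP call and returns a Task moref,
        # i.e. the "task-24523xx" values the log is waiting on.
        task = session.invoke_api(session.vim,
                                  "RemoveSnapshot_Task",
                                  snapshot_ref,
                                  removeChildren=False)
        # wait_for_task() polls the server-side task state (the
        # "progress is 0%" lines) until it succeeds or raises.
        session.wait_for_task(task)

The vmops and vm_util calls logged above appear to go through Nova's own thin wrappers around these same two steps, which is why every operation in this trace shows the same invoke/wait/complete shape.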
[ 1080.333442] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquiring lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.333680] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.521836] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1080.522192] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3bda8a87-3266-48fb-8d7f-50c19aa562a3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.533681] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1080.533681] env[62383]: value = "task-2452335" [ 1080.533681] env[62383]: _type = "Task" [ 1080.533681] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.542968] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1080.543219] env[62383]: DEBUG nova.compute.manager [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1080.544044] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0dc348-2029-42af-9e27-e019c72ab996 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.558636] env[62383]: DEBUG oslo_concurrency.lockutils [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.558817] env[62383]: DEBUG oslo_concurrency.lockutils [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.559012] env[62383]: DEBUG nova.network.neutron [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.589842] env[62383]: DEBUG oslo_concurrency.lockutils [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.590078] env[62383]: DEBUG oslo_concurrency.lockutils [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.593301] env[62383]: DEBUG nova.objects.instance [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'resources' on Instance uuid 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.835712] env[62383]: DEBUG nova.compute.manager [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 
tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1081.335044] env[62383]: DEBUG nova.network.neutron [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Updating instance_info_cache with network_info: [{"id": "4c1ffea5-d09f-4f98-bbe4-f02d40cad88d", "address": "fa:16:3e:56:8f:e8", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c1ffea5-d0", "ovs_interfaceid": "4c1ffea5-d09f-4f98-bbe4-f02d40cad88d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.342229] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d7c7ee-9fe2-4514-ab0e-705abfe18e82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.354796] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226d6cb2-8bd5-4e2a-88ba-6aec28fc6397 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.359831] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.388609] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e837312f-723b-4277-9e92-b9718d34a472 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.396856] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7638a31-5ccd-43af-a6d7-6fc627b8dba5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.412537] env[62383]: DEBUG nova.compute.provider_tree [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed in ProviderTree for provider: 
60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1081.837111] env[62383]: DEBUG oslo_concurrency.lockutils [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.916742] env[62383]: DEBUG nova.scheduler.client.report [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.093346] env[62383]: DEBUG nova.compute.manager [req-92395dae-63e2-47d9-a601-83e98c308523 req-e71441d1-cf84-4134-8624-8bdec0ea2555 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Received event network-vif-unplugged-4c1ffea5-d09f-4f98-bbe4-f02d40cad88d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1082.093577] env[62383]: DEBUG oslo_concurrency.lockutils [req-92395dae-63e2-47d9-a601-83e98c308523 req-e71441d1-cf84-4134-8624-8bdec0ea2555 service nova] Acquiring lock "981aa014-4861-4ab3-94e3-c113eec9bf29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.093784] env[62383]: DEBUG oslo_concurrency.lockutils [req-92395dae-63e2-47d9-a601-83e98c308523 req-e71441d1-cf84-4134-8624-8bdec0ea2555 service nova] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.094037] env[62383]: DEBUG oslo_concurrency.lockutils [req-92395dae-63e2-47d9-a601-83e98c308523 req-e71441d1-cf84-4134-8624-8bdec0ea2555 service nova] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.094139] env[62383]: DEBUG nova.compute.manager [req-92395dae-63e2-47d9-a601-83e98c308523 req-e71441d1-cf84-4134-8624-8bdec0ea2555 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] No waiting events found dispatching network-vif-unplugged-4c1ffea5-d09f-4f98-bbe4-f02d40cad88d {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1082.094303] env[62383]: WARNING nova.compute.manager [req-92395dae-63e2-47d9-a601-83e98c308523 req-e71441d1-cf84-4134-8624-8bdec0ea2555 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Received unexpected event network-vif-unplugged-4c1ffea5-d09f-4f98-bbe4-f02d40cad88d for instance with vm_state shelved and task_state shelving_offloading. 
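The repeated "Acquiring lock ... by ...", "acquired ... :: waited 0.000s" and ""released" ... :: held N.NNNs" triples throughout this section come from oslo.concurrency's lockutils logging. Below is a minimal sketch of the two usage forms involved, with placeholder lock names and empty critical sections; the real call sites are the Nova functions named in the log, e.g. ResourceTracker.update_usage and the per-instance refresh_cache locks.

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on a named in-process semaphore and
    # emits the acquire/waited/held log lines around the wrapped function.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_usage():
        pass  # critical section placeholder

    # Context-manager form, as used for the per-instance
    # "refresh_cache-<uuid>" locks seen above.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # critical section placeholder

The "held N.NNNs" figures (e.g. the 1.831s hold on "compute_resources" at 1082.421307 below) are the numbers to watch: the quoted "released" indicates these are in-process semaphores rather than external file locks, so a long hold directly serializes other work on the same compute host.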
[ 1082.138013] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1082.139215] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e798cbd2-32c5-4ceb-b3a2-8ba849d69fa3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.148072] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1082.148229] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3409c766-a84c-4210-a9f2-254a73373d6a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.227086] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1082.227266] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1082.227448] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleting the datastore file [datastore2] 981aa014-4861-4ab3-94e3-c113eec9bf29 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.227816] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d168f091-3a6b-4c8b-bdbb-a8634c628b28 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.235419] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1082.235419] env[62383]: value = "task-2452337" [ 1082.235419] env[62383]: _type = "Task" [ 1082.235419] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.243746] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452337, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.421307] env[62383]: DEBUG oslo_concurrency.lockutils [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.423679] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.064s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.425757] env[62383]: INFO nova.compute.claims [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1082.444603] env[62383]: INFO nova.scheduler.client.report [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleted allocations for instance 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7 [ 1082.747104] env[62383]: DEBUG oslo_vmware.api [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14313} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.747376] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1082.747561] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1082.747736] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1082.763082] env[62383]: INFO nova.scheduler.client.report [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted allocations for instance 981aa014-4861-4ab3-94e3-c113eec9bf29 [ 1082.952864] env[62383]: DEBUG oslo_concurrency.lockutils [None req-510cbe7e-a8bd-44d0-a38b-e7daf6aef148 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "3e868358-2aa2-4ddd-9c2e-16eb5c194bb7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.027s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.045973] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.266957] env[62383]: DEBUG oslo_concurrency.lockutils [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.562023] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8cf7ea-72e0-4565-9e19-2734d25dff1a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.575579] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22de687-629b-4e22-bb91-7d722632a1be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.614037] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aebe6c5-1e61-4b72-a8b8-27f48d3cc4da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.622262] env[62383]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae220c4-31f4-441a-bfc4-1085f8c003c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.638443] env[62383]: DEBUG nova.compute.provider_tree [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.121282] env[62383]: DEBUG nova.compute.manager [req-9d53d0dd-33fe-4796-b8fb-272dbc80af42 req-603c4cb9-a2d6-45c8-bcef-73ad96737516 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Received event network-changed-4c1ffea5-d09f-4f98-bbe4-f02d40cad88d {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1084.121474] env[62383]: DEBUG nova.compute.manager [req-9d53d0dd-33fe-4796-b8fb-272dbc80af42 req-603c4cb9-a2d6-45c8-bcef-73ad96737516 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Refreshing instance network info cache due to event network-changed-4c1ffea5-d09f-4f98-bbe4-f02d40cad88d. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1084.121689] env[62383]: DEBUG oslo_concurrency.lockutils [req-9d53d0dd-33fe-4796-b8fb-272dbc80af42 req-603c4cb9-a2d6-45c8-bcef-73ad96737516 service nova] Acquiring lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.121836] env[62383]: DEBUG oslo_concurrency.lockutils [req-9d53d0dd-33fe-4796-b8fb-272dbc80af42 req-603c4cb9-a2d6-45c8-bcef-73ad96737516 service nova] Acquired lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.121995] env[62383]: DEBUG nova.network.neutron [req-9d53d0dd-33fe-4796-b8fb-272dbc80af42 req-603c4cb9-a2d6-45c8-bcef-73ad96737516 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Refreshing network info cache for port 4c1ffea5-d09f-4f98-bbe4-f02d40cad88d {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1084.140688] env[62383]: DEBUG nova.scheduler.client.report [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1084.158620] env[62383]: DEBUG oslo_concurrency.lockutils [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquiring lock "8bd05dac-7aa2-44c5-8752-6045c01d213d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.158860] env[62383]: DEBUG oslo_concurrency.lockutils [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lock "8bd05dac-7aa2-44c5-8752-6045c01d213d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.159075] env[62383]: DEBUG oslo_concurrency.lockutils [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquiring lock "8bd05dac-7aa2-44c5-8752-6045c01d213d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.159258] env[62383]: DEBUG oslo_concurrency.lockutils [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lock "8bd05dac-7aa2-44c5-8752-6045c01d213d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.159427] env[62383]: DEBUG oslo_concurrency.lockutils [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lock "8bd05dac-7aa2-44c5-8752-6045c01d213d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.161293] env[62383]: INFO nova.compute.manager [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Terminating instance [ 1084.241026] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1084.241026] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1084.241211] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1084.241211] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Rebuilding the list of instances to heal {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1084.445536] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 
tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "981aa014-4861-4ab3-94e3-c113eec9bf29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.644959] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.645455] env[62383]: DEBUG nova.compute.manager [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1084.648418] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.603s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.648628] env[62383]: DEBUG nova.objects.instance [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'pci_requests' on Instance uuid ec7c648d-10b0-480a-a5f0-4dab08d0049e {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.664987] env[62383]: DEBUG nova.compute.manager [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1084.665220] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1084.666319] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b16bcf3-fad9-4a0c-81fd-3cb8fa3000c0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.674220] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1084.674454] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d83b066f-6290-4e60-b1d3-fe1151a20938 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.681477] env[62383]: DEBUG oslo_vmware.api [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for the task: (returnval){ [ 1084.681477] env[62383]: value = "task-2452338" [ 1084.681477] env[62383]: _type = "Task" [ 1084.681477] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.689953] env[62383]: DEBUG oslo_vmware.api [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2452338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.746380] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Skipping network cache update for instance because it is being deleted. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10462}} [ 1084.746557] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Skipping network cache update for instance because it is being deleted. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10462}} [ 1084.746700] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Skipping network cache update for instance because it is Building. 
{{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1084.746902] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1084.747031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquired lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.747164] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Forcefully refreshing network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1084.747304] env[62383]: DEBUG nova.objects.instance [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lazy-loading 'info_cache' on Instance uuid 2f028680-8db4-474a-8f24-880c4702877b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.858846] env[62383]: DEBUG nova.compute.manager [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Stashing vm_state: active {{(pid=62383) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1084.866246] env[62383]: DEBUG nova.network.neutron [req-9d53d0dd-33fe-4796-b8fb-272dbc80af42 req-603c4cb9-a2d6-45c8-bcef-73ad96737516 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Updated VIF entry in instance network info cache for port 4c1ffea5-d09f-4f98-bbe4-f02d40cad88d. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.866585] env[62383]: DEBUG nova.network.neutron [req-9d53d0dd-33fe-4796-b8fb-272dbc80af42 req-603c4cb9-a2d6-45c8-bcef-73ad96737516 service nova] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Updating instance_info_cache with network_info: [{"id": "4c1ffea5-d09f-4f98-bbe4-f02d40cad88d", "address": "fa:16:3e:56:8f:e8", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": null, "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4c1ffea5-d0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.151414] env[62383]: DEBUG nova.compute.utils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1085.154039] env[62383]: DEBUG nova.objects.instance [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'numa_topology' on Instance uuid ec7c648d-10b0-480a-a5f0-4dab08d0049e {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.155100] env[62383]: DEBUG nova.compute.manager [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1085.155328] env[62383]: DEBUG nova.network.neutron [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1085.189206] env[62383]: DEBUG nova.policy [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd185acc82c2345d096708616cfbd8a2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9b571010721e4416971f1d4beffe7aef', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1085.194110] env[62383]: DEBUG oslo_vmware.api [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2452338, 'name': PowerOffVM_Task, 'duration_secs': 0.192198} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.194449] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1085.194634] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1085.194879] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e04d5e07-101b-4ac1-b9f5-618ced89eebf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.265019] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1085.265434] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1085.265716] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 
tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Deleting the datastore file [datastore1] 8bd05dac-7aa2-44c5-8752-6045c01d213d {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1085.266406] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d7beea9-4f60-41ae-afb5-e5e8321220cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.274112] env[62383]: DEBUG oslo_vmware.api [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for the task: (returnval){ [ 1085.274112] env[62383]: value = "task-2452340" [ 1085.274112] env[62383]: _type = "Task" [ 1085.274112] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.282609] env[62383]: DEBUG oslo_vmware.api [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2452340, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.370888] env[62383]: DEBUG oslo_concurrency.lockutils [req-9d53d0dd-33fe-4796-b8fb-272dbc80af42 req-603c4cb9-a2d6-45c8-bcef-73ad96737516 service nova] Releasing lock "refresh_cache-981aa014-4861-4ab3-94e3-c113eec9bf29" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1085.379113] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.487778] env[62383]: DEBUG nova.network.neutron [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Successfully created port: 92d87ef0-de90-43ef-aef1-b558be3349b9 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1085.657695] env[62383]: DEBUG nova.compute.manager [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1085.660158] env[62383]: INFO nova.compute.claims [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1085.785763] env[62383]: DEBUG oslo_vmware.api [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Task: {'id': task-2452340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.355579} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.788118] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1085.788373] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1085.788580] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1085.788774] env[62383]: INFO nova.compute.manager [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1085.789147] env[62383]: DEBUG oslo.service.loopingcall [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1085.789568] env[62383]: DEBUG nova.compute.manager [-] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1085.789666] env[62383]: DEBUG nova.network.neutron [-] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1086.249311] env[62383]: DEBUG nova.compute.manager [req-83ac060c-2302-4241-b8e3-2d5ad4dedf19 req-bb953871-45c7-40c5-a123-8822fd207f06 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Received event network-vif-deleted-9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1086.249311] env[62383]: INFO nova.compute.manager [req-83ac060c-2302-4241-b8e3-2d5ad4dedf19 req-bb953871-45c7-40c5-a123-8822fd207f06 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Neutron deleted interface 9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4; detaching it from the instance and deleting it from the info cache [ 1086.250023] env[62383]: DEBUG nova.network.neutron [req-83ac060c-2302-4241-b8e3-2d5ad4dedf19 req-bb953871-45c7-40c5-a123-8822fd207f06 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.469933] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [{"id": "5ba29557-a079-4404-9449-eeff24a0a3e4", "address": "fa:16:3e:fd:2c:1c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ba29557-a0", "ovs_interfaceid": "5ba29557-a079-4404-9449-eeff24a0a3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.672177] env[62383]: DEBUG nova.compute.manager [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1086.696170] env[62383]: DEBUG nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1086.696434] env[62383]: DEBUG nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1086.696591] env[62383]: DEBUG nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1086.696773] env[62383]: DEBUG nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1086.696921] env[62383]: DEBUG nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1086.697115] env[62383]: DEBUG nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1086.697371] env[62383]: DEBUG nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1086.697545] env[62383]: DEBUG nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1086.697713] env[62383]: DEBUG 
nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1086.697875] env[62383]: DEBUG nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1086.698058] env[62383]: DEBUG nova.virt.hardware [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1086.698990] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a928d3d3-367b-4d6f-822e-0f7303b6a639 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.709479] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07eb6956-2153-4602-9fef-7e716b57158a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.733494] env[62383]: DEBUG nova.network.neutron [-] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.754854] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32683529-c572-4293-b1a8-fa92e1adc8bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.767482] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a616306-31ab-4790-8a8a-eccc41a78807 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.801447] env[62383]: DEBUG nova.compute.manager [req-83ac060c-2302-4241-b8e3-2d5ad4dedf19 req-bb953871-45c7-40c5-a123-8822fd207f06 service nova] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Detach interface failed, port_id=9ff3b35c-c2b8-4984-9a1b-52c06da6e6c4, reason: Instance 8bd05dac-7aa2-44c5-8752-6045c01d213d could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1086.827720] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f47f4c-1ca2-4ab4-be4b-b2b909f35f5f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.837419] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b1972c-634f-4c8a-8ced-14537da96975 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.874428] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30277233-1550-42c2-bd58-753be77eec38 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.882655] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1298490-5fba-4200-9490-46a6c004c868 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.896560] env[62383]: DEBUG nova.compute.provider_tree [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1086.899573] env[62383]: DEBUG nova.compute.manager [req-ced87af4-295a-4c0f-9f42-a26bcf82026f req-f6fb5247-7632-47e0-b43f-d92242850791 service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Received event network-vif-plugged-92d87ef0-de90-43ef-aef1-b558be3349b9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1086.899777] env[62383]: DEBUG oslo_concurrency.lockutils [req-ced87af4-295a-4c0f-9f42-a26bcf82026f req-f6fb5247-7632-47e0-b43f-d92242850791 service nova] Acquiring lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.899987] env[62383]: DEBUG oslo_concurrency.lockutils [req-ced87af4-295a-4c0f-9f42-a26bcf82026f req-f6fb5247-7632-47e0-b43f-d92242850791 service nova] Lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.900170] env[62383]: DEBUG oslo_concurrency.lockutils [req-ced87af4-295a-4c0f-9f42-a26bcf82026f req-f6fb5247-7632-47e0-b43f-d92242850791 service nova] Lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.900333] env[62383]: DEBUG nova.compute.manager [req-ced87af4-295a-4c0f-9f42-a26bcf82026f req-f6fb5247-7632-47e0-b43f-d92242850791 service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] No waiting events found dispatching network-vif-plugged-92d87ef0-de90-43ef-aef1-b558be3349b9 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1086.900495] 
env[62383]: WARNING nova.compute.manager [req-ced87af4-295a-4c0f-9f42-a26bcf82026f req-f6fb5247-7632-47e0-b43f-d92242850791 service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Received unexpected event network-vif-plugged-92d87ef0-de90-43ef-aef1-b558be3349b9 for instance with vm_state building and task_state spawning. [ 1086.974202] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Releasing lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.974202] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updated the network info_cache for instance {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1086.974558] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.974626] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.974789] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.974981] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1086.975063] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.976651] env[62383]: DEBUG nova.network.neutron [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Successfully updated port: 92d87ef0-de90-43ef-aef1-b558be3349b9 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1087.236792] env[62383]: INFO nova.compute.manager [-] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Took 1.45 seconds to deallocate network for instance. 
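The entries above interleave several concurrent request contexts (req-…), periodic tasks, and Neutron event callbacks, and in this capture many entries are run together on each physical line. To follow a single request or instance (for example the build of 91ce6946-0c8a-4b59-bbe0-54a566a57cdb) it helps to split the capture back into entries and index them. The sketch below is not part of Nova; the regular expressions and field names are assumptions inferred only from the entry shape visible in this log ("[ seconds] env[NNNNN]: LEVEL logger [context] message {{(pid=NNNNN) function file:line}}").

```python
import re
from typing import Iterator

# Assumed entry layout, inferred from this capture:
#   [ 1086.733494] env[62383]: DEBUG nova.network.neutron [-] [instance: <uuid>] msg {{(pid=62383) func file:line}}
ENTRY_START = re.compile(r"(?=\[\s*\d+\.\d+\]\s+env\[\d+\]:)")   # zero-width split point before each entry
ENTRY_RE = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+"                 # elapsed seconds since service start
    r"env\[(?P<env>\d+)\]:\s+"                    # environment/worker id, e.g. env[62383]
    r"(?P<level>DEBUG|INFO|WARNING|ERROR)\s+"
    r"(?P<logger>\S+)\s+"                         # logger name, e.g. nova.compute.manager
    r"\[(?P<context>[^\]]*)\]\s+"                 # request context: "None req-... ...", "req-... service nova", or "-"
    r"(?P<message>.*)",                           # remainder, including optional [instance: ...] and {{...}} trailer
    re.DOTALL,
)
INSTANCE_RE = re.compile(r"\[instance: (?P<uuid>[0-9a-f-]{36})\]")


def iter_entries(blob: str) -> Iterator[dict]:
    """Split a run-together capture into entries and parse each one.

    Continuation lines without a level (e.g. the multi-line "(returnval){ ... }"
    task dumps) simply fail to match and are skipped by this sketch.
    """
    for chunk in ENTRY_START.split(blob):
        m = ENTRY_RE.match(chunk.strip())
        if not m:
            continue
        entry = m.groupdict()
        inst = INSTANCE_RE.search(entry["message"])
        entry["instance"] = inst.group("uuid") if inst else None
        yield entry


def entries_for_instance(blob: str, uuid: str) -> list[dict]:
    """All entries mentioning a given instance UUID, in log order."""
    return [e for e in iter_entries(blob) if e["instance"] == uuid]
```

With a capture like this one loaded into a string, entries_for_instance(blob, "91ce6946-0c8a-4b59-bbe0-54a566a57cdb") would collect the port-plug event, cache refresh, folder creation, and VM-creation steps that are otherwise scattered across the surrounding lines; this is only a convenience sketch for reading the log, not an official parsing tool.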
[ 1087.402768] env[62383]: DEBUG nova.scheduler.client.report [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.479809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.480199] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquiring lock "refresh_cache-91ce6946-0c8a-4b59-bbe0-54a566a57cdb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1087.480336] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquired lock "refresh_cache-91ce6946-0c8a-4b59-bbe0-54a566a57cdb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.480482] env[62383]: DEBUG nova.network.neutron [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1087.742451] env[62383]: DEBUG oslo_concurrency.lockutils [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.909073] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.260s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.910910] env[62383]: DEBUG oslo_concurrency.lockutils [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.644s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.911206] env[62383]: DEBUG 
nova.objects.instance [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lazy-loading 'resources' on Instance uuid 981aa014-4861-4ab3-94e3-c113eec9bf29 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1087.940137] env[62383]: INFO nova.network.neutron [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating port 275a086a-5096-4414-8397-af9ac5331f87 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1088.014693] env[62383]: DEBUG nova.network.neutron [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1088.136787] env[62383]: DEBUG nova.network.neutron [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Updating instance_info_cache with network_info: [{"id": "92d87ef0-de90-43ef-aef1-b558be3349b9", "address": "fa:16:3e:ae:81:2d", "network": {"id": "272e2c5d-45ea-4bd4-a935-a5d408c0bd47", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-332774218-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b571010721e4416971f1d4beffe7aef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92d87ef0-de", "ovs_interfaceid": "92d87ef0-de90-43ef-aef1-b558be3349b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.413954] env[62383]: DEBUG nova.objects.instance [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lazy-loading 'numa_topology' on Instance uuid 981aa014-4861-4ab3-94e3-c113eec9bf29 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.641370] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Releasing lock "refresh_cache-91ce6946-0c8a-4b59-bbe0-54a566a57cdb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1088.641734] env[62383]: DEBUG nova.compute.manager [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 
tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Instance network_info: |[{"id": "92d87ef0-de90-43ef-aef1-b558be3349b9", "address": "fa:16:3e:ae:81:2d", "network": {"id": "272e2c5d-45ea-4bd4-a935-a5d408c0bd47", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-332774218-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b571010721e4416971f1d4beffe7aef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92d87ef0-de", "ovs_interfaceid": "92d87ef0-de90-43ef-aef1-b558be3349b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1088.642463] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:81:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '195e328b-e41a-49f5-9e51-546b8ea8ceba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92d87ef0-de90-43ef-aef1-b558be3349b9', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1088.649779] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Creating folder: Project (9b571010721e4416971f1d4beffe7aef). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1088.650073] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d7908f1-0b52-4d09-aaed-6131d1d301b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.662859] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Created folder: Project (9b571010721e4416971f1d4beffe7aef) in parent group-v496304. [ 1088.663058] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Creating folder: Instances. Parent ref: group-v496590. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1088.663338] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-070e6cfd-463e-4c16-b5aa-253c65b72bab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.672448] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Created folder: Instances in parent group-v496590. [ 1088.672945] env[62383]: DEBUG oslo.service.loopingcall [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1088.672945] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1088.673080] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-203066db-370c-4b9f-9e88-694b0813a3bf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.691606] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.691606] env[62383]: value = "task-2452343" [ 1088.691606] env[62383]: _type = "Task" [ 1088.691606] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.699332] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452343, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.916737] env[62383]: DEBUG nova.objects.base [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Object Instance<981aa014-4861-4ab3-94e3-c113eec9bf29> lazy-loaded attributes: resources,numa_topology {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1088.924306] env[62383]: DEBUG nova.compute.manager [req-6ae36d8e-9500-4339-abff-864a36a4fc48 req-a61f1c77-9505-4e6f-b710-d8c9aca9b2db service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Received event network-changed-92d87ef0-de90-43ef-aef1-b558be3349b9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1088.924509] env[62383]: DEBUG nova.compute.manager [req-6ae36d8e-9500-4339-abff-864a36a4fc48 req-a61f1c77-9505-4e6f-b710-d8c9aca9b2db service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Refreshing instance network info cache due to event network-changed-92d87ef0-de90-43ef-aef1-b558be3349b9. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1088.924731] env[62383]: DEBUG oslo_concurrency.lockutils [req-6ae36d8e-9500-4339-abff-864a36a4fc48 req-a61f1c77-9505-4e6f-b710-d8c9aca9b2db service nova] Acquiring lock "refresh_cache-91ce6946-0c8a-4b59-bbe0-54a566a57cdb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1088.924871] env[62383]: DEBUG oslo_concurrency.lockutils [req-6ae36d8e-9500-4339-abff-864a36a4fc48 req-a61f1c77-9505-4e6f-b710-d8c9aca9b2db service nova] Acquired lock "refresh_cache-91ce6946-0c8a-4b59-bbe0-54a566a57cdb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.925100] env[62383]: DEBUG nova.network.neutron [req-6ae36d8e-9500-4339-abff-864a36a4fc48 req-a61f1c77-9505-4e6f-b710-d8c9aca9b2db service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Refreshing network info cache for port 92d87ef0-de90-43ef-aef1-b558be3349b9 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1089.053926] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e3536c-acb1-4bc8-a6dd-f1835ee0230d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.061721] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bfb1d8-1ae7-4ab1-8652-4ffffcdcdf6a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.092318] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4b7cc4-9cdf-4fee-878f-31f1ff3c2568 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.098964] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a293ee1-d733-439d-8a1d-fe27ee549ac0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.112135] env[62383]: DEBUG nova.compute.provider_tree [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.201850] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452343, 'name': CreateVM_Task, 'duration_secs': 0.348191} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.201850] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1089.202620] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.202838] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.203286] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1089.203540] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1160805e-595b-4750-bfcd-bb52a7966586 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.208093] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for the task: (returnval){ [ 1089.208093] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52479e20-6871-ca0f-3cb7-c5711ea12d63" [ 1089.208093] env[62383]: _type = "Task" [ 1089.208093] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.216220] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52479e20-6871-ca0f-3cb7-c5711ea12d63, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.556365] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.556546] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.557603] env[62383]: DEBUG nova.network.neutron [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.614968] env[62383]: DEBUG nova.network.neutron [req-6ae36d8e-9500-4339-abff-864a36a4fc48 req-a61f1c77-9505-4e6f-b710-d8c9aca9b2db service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Updated VIF entry in instance network info cache for port 92d87ef0-de90-43ef-aef1-b558be3349b9. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1089.615472] env[62383]: DEBUG nova.network.neutron [req-6ae36d8e-9500-4339-abff-864a36a4fc48 req-a61f1c77-9505-4e6f-b710-d8c9aca9b2db service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Updating instance_info_cache with network_info: [{"id": "92d87ef0-de90-43ef-aef1-b558be3349b9", "address": "fa:16:3e:ae:81:2d", "network": {"id": "272e2c5d-45ea-4bd4-a935-a5d408c0bd47", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-332774218-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b571010721e4416971f1d4beffe7aef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "195e328b-e41a-49f5-9e51-546b8ea8ceba", "external-id": "nsx-vlan-transportzone-735", "segmentation_id": 735, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92d87ef0-de", "ovs_interfaceid": "92d87ef0-de90-43ef-aef1-b558be3349b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.617255] env[62383]: DEBUG nova.scheduler.client.report [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.719181] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52479e20-6871-ca0f-3cb7-c5711ea12d63, 'name': SearchDatastore_Task, 'duration_secs': 0.009557} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.719522] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1089.719759] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.719988] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.720153] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.720330] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.720580] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e5f0de5-c008-45c8-b733-08b50314f80d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.728989] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.729176] env[62383]: DEBUG 
nova.virt.vmwareapi.vmops [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1089.729837] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e489a02-b990-422b-bb4b-55584c7d3209 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.734822] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for the task: (returnval){ [ 1089.734822] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c0568a-3643-3c05-8d9e-f71a49cec39d" [ 1089.734822] env[62383]: _type = "Task" [ 1089.734822] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.741788] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c0568a-3643-3c05-8d9e-f71a49cec39d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.122107] env[62383]: DEBUG oslo_concurrency.lockutils [req-6ae36d8e-9500-4339-abff-864a36a4fc48 req-a61f1c77-9505-4e6f-b710-d8c9aca9b2db service nova] Releasing lock "refresh_cache-91ce6946-0c8a-4b59-bbe0-54a566a57cdb" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.122896] env[62383]: DEBUG oslo_concurrency.lockutils [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.212s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.125325] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.747s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.245222] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c0568a-3643-3c05-8d9e-f71a49cec39d, 'name': SearchDatastore_Task, 'duration_secs': 0.0094} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.245988] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72e974c9-9357-4a59-b707-03fb9d50d4b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.251134] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for the task: (returnval){ [ 1090.251134] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52be123d-6f77-f8b1-98f3-79f401dea0eb" [ 1090.251134] env[62383]: _type = "Task" [ 1090.251134] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.256651] env[62383]: DEBUG nova.network.neutron [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating instance_info_cache with network_info: [{"id": "275a086a-5096-4414-8397-af9ac5331f87", "address": "fa:16:3e:2b:a3:17", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap275a086a-50", "ovs_interfaceid": "275a086a-5096-4414-8397-af9ac5331f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.263800] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52be123d-6f77-f8b1-98f3-79f401dea0eb, 'name': SearchDatastore_Task, 'duration_secs': 0.009918} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.264043] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.264291] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 91ce6946-0c8a-4b59-bbe0-54a566a57cdb/91ce6946-0c8a-4b59-bbe0-54a566a57cdb.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1090.264525] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8cdd5b16-0be4-453b-8642-5f1e917b3922 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.271658] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for the task: (returnval){ [ 1090.271658] env[62383]: value = "task-2452344" [ 1090.271658] env[62383]: _type = "Task" [ 1090.271658] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.279626] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452344, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.632595] env[62383]: INFO nova.compute.claims [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1090.639897] env[62383]: DEBUG oslo_concurrency.lockutils [None req-655019b1-cae9-4907-aa6b-7b60caa4f4a0 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.838s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.640891] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.196s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.641144] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "981aa014-4861-4ab3-94e3-c113eec9bf29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.641453] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.641675] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.643261] env[62383]: INFO nova.compute.manager [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Terminating instance [ 1090.759677] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1090.781790] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 
tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452344, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463332} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.783704] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 91ce6946-0c8a-4b59-bbe0-54a566a57cdb/91ce6946-0c8a-4b59-bbe0-54a566a57cdb.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1090.783923] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.784407] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6198b74-6265-45d7-8db0-1d59ac12d3e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.790062] env[62383]: DEBUG nova.virt.hardware [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='57faa92468ace9f9b6db74c7ab95c9c1',container_format='bare',created_at=2025-02-11T15:32:52Z,direct_url=,disk_format='vmdk',id=668a5589-791f-4966-838e-a17995d2fb51,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1866884878-shelved',owner='b395bdf2df794b32a117f93fa4887c8e',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2025-02-11T15:33:09Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1090.790297] env[62383]: DEBUG nova.virt.hardware [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1090.790453] env[62383]: DEBUG nova.virt.hardware [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1090.790632] env[62383]: DEBUG nova.virt.hardware [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1090.790779] env[62383]: DEBUG nova.virt.hardware 
[None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1090.790924] env[62383]: DEBUG nova.virt.hardware [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1090.791195] env[62383]: DEBUG nova.virt.hardware [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1090.791390] env[62383]: DEBUG nova.virt.hardware [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1090.791565] env[62383]: DEBUG nova.virt.hardware [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1090.791729] env[62383]: DEBUG nova.virt.hardware [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1090.791903] env[62383]: DEBUG nova.virt.hardware [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1090.792679] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0a8a46-6ba8-4dfb-a8d9-5d58477c15b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.797061] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for the task: (returnval){ [ 1090.797061] env[62383]: value = "task-2452345" [ 1090.797061] env[62383]: _type = "Task" [ 1090.797061] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.804290] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b9c803-7e3f-4351-8ec0-b950830b530a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.810748] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452345, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.820969] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:a3:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7908211b-df93-467b-87a8-3c3d29b03de6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '275a086a-5096-4414-8397-af9ac5331f87', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1090.828752] env[62383]: DEBUG oslo.service.loopingcall [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.829035] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1090.829274] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b49cf045-08e2-48e4-846e-366ab8a014b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.848957] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1090.848957] env[62383]: value = "task-2452346" [ 1090.848957] env[62383]: _type = "Task" [ 1090.848957] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.856842] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452346, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.949195] env[62383]: DEBUG nova.compute.manager [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received event network-vif-plugged-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1090.949408] env[62383]: DEBUG oslo_concurrency.lockutils [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] Acquiring lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.949624] env[62383]: DEBUG oslo_concurrency.lockutils [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.949798] env[62383]: DEBUG oslo_concurrency.lockutils [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.949968] env[62383]: DEBUG nova.compute.manager [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] No waiting events found dispatching network-vif-plugged-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1090.950151] env[62383]: WARNING nova.compute.manager [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received unexpected event network-vif-plugged-275a086a-5096-4414-8397-af9ac5331f87 for instance with vm_state shelved_offloaded and task_state spawning. [ 1090.950315] env[62383]: DEBUG nova.compute.manager [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received event network-changed-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1090.950468] env[62383]: DEBUG nova.compute.manager [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Refreshing instance network info cache due to event network-changed-275a086a-5096-4414-8397-af9ac5331f87. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1090.950750] env[62383]: DEBUG oslo_concurrency.lockutils [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] Acquiring lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.950819] env[62383]: DEBUG oslo_concurrency.lockutils [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] Acquired lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.950934] env[62383]: DEBUG nova.network.neutron [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Refreshing network info cache for port 275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1091.139309] env[62383]: INFO nova.compute.resource_tracker [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating resource usage from migration 227d47a8-0fa8-4aa6-b7e6-671721533413 [ 1091.147159] env[62383]: DEBUG nova.compute.manager [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1091.147392] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1091.147725] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b00b4335-6d63-4df7-bf34-9c966a025ac5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.157525] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cea4de-aabe-4841-a883-9156c60f6b68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.194430] env[62383]: WARNING nova.virt.vmwareapi.vmops [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 981aa014-4861-4ab3-94e3-c113eec9bf29 could not be found. 
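The WARNING just above shows the normal path for deleting an instance whose backend VM is already gone: the driver reports InstanceNotFound, the instance is still marked destroyed, and the manager continues on to deallocate its network so nothing is leaked. The sketch below illustrates that idempotent-teardown idea in isolation; the class and function names are hypothetical and this is not Nova's implementation.

```python
import logging

LOG = logging.getLogger(__name__)


class BackendInstanceNotFound(Exception):
    """Raised by the (hypothetical) driver when the hypervisor has no such VM."""


def destroy_instance(driver, network_api, instance_uuid: str) -> None:
    """Tear an instance down, treating 'already gone' on the backend as success.

    Mirrors the behaviour visible in the log: a missing backend VM is only a
    WARNING, and teardown still proceeds to network deallocation.
    """
    try:
        driver.destroy(instance_uuid)
    except BackendInstanceNotFound:
        # Nothing to undo on the hypervisor; log it and fall through to cleanup.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.info("Instance destroyed on the hypervisor (or already absent)")

    # Network cleanup runs unconditionally so retries of a partially failed
    # delete converge to the same end state.
    network_api.deallocate_for_instance(instance_uuid)
```

Keeping the cleanup unconditional is what makes a retried or raced delete (as in the tempest-DeleteServersTestJSON flow above) safe: the second attempt hits the not-found branch and still releases ports and allocations.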
[ 1091.194645] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1091.194831] env[62383]: INFO nova.compute.manager [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1091.195088] env[62383]: DEBUG oslo.service.loopingcall [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.197536] env[62383]: DEBUG nova.compute.manager [-] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1091.197641] env[62383]: DEBUG nova.network.neutron [-] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1091.278341] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8747c31e-af7f-4355-9e9b-9a0f605d9134 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.286423] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cad8a82-c062-4437-8e49-4bd72854f04f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.318843] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea826265-066e-422b-8bd4-238d6ebac0fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.326720] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452345, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064547} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.328879] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.329934] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e742ca-a2e1-4849-8643-9c2f488f6db4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.333092] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d145830-666a-4f2c-b8cd-72d82e441a33 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.346714] env[62383]: DEBUG nova.compute.provider_tree [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1091.370504] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 91ce6946-0c8a-4b59-bbe0-54a566a57cdb/91ce6946-0c8a-4b59-bbe0-54a566a57cdb.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.374426] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c01c269-bb7f-4c13-a4f9-d85d6abcf54e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.397130] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452346, 'name': CreateVM_Task, 'duration_secs': 0.346431} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.397435] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1091.397768] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for the task: (returnval){ [ 1091.397768] env[62383]: value = "task-2452347" [ 1091.397768] env[62383]: _type = "Task" [ 1091.397768] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.398389] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.398544] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.402020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1091.402020] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e90fd2de-c616-4254-8174-9298dc1933eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.407455] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1091.407455] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5285a52a-6879-141a-3ede-631fdfaa1a77" [ 1091.407455] env[62383]: _type = "Task" [ 1091.407455] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.411783] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452347, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.425841] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5285a52a-6879-141a-3ede-631fdfaa1a77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.668909] env[62383]: DEBUG nova.network.neutron [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updated VIF entry in instance network info cache for port 275a086a-5096-4414-8397-af9ac5331f87. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1091.669336] env[62383]: DEBUG nova.network.neutron [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating instance_info_cache with network_info: [{"id": "275a086a-5096-4414-8397-af9ac5331f87", "address": "fa:16:3e:2b:a3:17", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap275a086a-50", "ovs_interfaceid": "275a086a-5096-4414-8397-af9ac5331f87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.911120] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452347, 'name': ReconfigVM_Task, 'duration_secs': 0.464481} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.911445] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 91ce6946-0c8a-4b59-bbe0-54a566a57cdb/91ce6946-0c8a-4b59-bbe0-54a566a57cdb.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1091.912171] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0efd69b4-461f-4f32-ba9d-f7d5edd68207 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.924195] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.924592] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Processing image 668a5589-791f-4966-838e-a17995d2fb51 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1091.924856] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51/668a5589-791f-4966-838e-a17995d2fb51.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.925019] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51/668a5589-791f-4966-838e-a17995d2fb51.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.925215] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.925513] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for the task: (returnval){ [ 1091.925513] env[62383]: value = "task-2452348" [ 1091.925513] env[62383]: _type = "Task" [ 1091.925513] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.925697] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a522178-3966-401b-9e9b-08c2cc0afed7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.936454] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452348, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.936795] env[62383]: DEBUG nova.network.neutron [-] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.940186] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.940290] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1091.941092] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e863ff9-3ba9-436a-a438-2c932f17697a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.947407] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1091.947407] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f1b06f-1960-9ec9-2a58-3a5b7d530bb8" [ 1091.947407] env[62383]: _type = "Task" [ 1091.947407] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.959556] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f1b06f-1960-9ec9-2a58-3a5b7d530bb8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.999096] env[62383]: ERROR nova.scheduler.client.report [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [req-6643ca40-f36d-4e3d-a7a4-d938c9ae4760] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6643ca40-f36d-4e3d-a7a4-d938c9ae4760"}]} [ 1092.016994] env[62383]: DEBUG nova.scheduler.client.report [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1092.033368] env[62383]: DEBUG nova.scheduler.client.report [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1092.033624] env[62383]: DEBUG nova.compute.provider_tree [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1092.047304] env[62383]: DEBUG nova.scheduler.client.report [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1092.067603] env[62383]: DEBUG nova.scheduler.client.report [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Refreshing trait 
associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1092.171728] env[62383]: DEBUG oslo_concurrency.lockutils [req-2b555af6-2315-4748-a74d-60ffe9177537 req-9cbc53c6-ec3e-4c22-a086-afa365f0145c service nova] Releasing lock "refresh_cache-ec7c648d-10b0-480a-a5f0-4dab08d0049e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.190926] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8f2ce7-43e8-4016-bd6b-cf8d26d2fdf5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.199098] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ec292d-b596-4919-98a1-d29d3252cf2d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.233604] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6d45ce-7524-44ba-8406-dacf9f15e3ff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.241446] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615b3026-4e43-4204-ab93-cd9bc8348c51 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.255260] env[62383]: DEBUG nova.compute.provider_tree [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1092.439161] env[62383]: INFO nova.compute.manager [-] [instance: 981aa014-4861-4ab3-94e3-c113eec9bf29] Took 1.24 seconds to deallocate network for instance. [ 1092.439676] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452348, 'name': Rename_Task, 'duration_secs': 0.345934} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.441377] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1092.444117] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa229f0c-e22a-404d-be16-f08cf08a5335 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.452750] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for the task: (returnval){ [ 1092.452750] env[62383]: value = "task-2452349" [ 1092.452750] env[62383]: _type = "Task" [ 1092.452750] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.460761] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Preparing fetch location {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1092.461019] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Fetch image to [datastore2] OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0/OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0.vmdk {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1092.461240] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Downloading stream optimized image 668a5589-791f-4966-838e-a17995d2fb51 to [datastore2] OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0/OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0.vmdk on the data store datastore2 as vApp {{(pid=62383) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1092.461418] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Downloading image file data 668a5589-791f-4966-838e-a17995d2fb51 to the ESX as VM named 'OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0' {{(pid=62383) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1092.466304] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452349, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.555471] env[62383]: DEBUG oslo_vmware.rw_handles [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1092.555471] env[62383]: value = "resgroup-9" [ 1092.555471] env[62383]: _type = "ResourcePool" [ 1092.555471] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1092.555856] env[62383]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-3dcffecc-f289-4ef9-b9f9-8f4110e80a43 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.580810] env[62383]: DEBUG oslo_vmware.rw_handles [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lease: (returnval){ [ 1092.580810] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5248acef-564e-2bd9-37ad-17fb7e089014" [ 1092.580810] env[62383]: _type = "HttpNfcLease" [ 1092.580810] env[62383]: } obtained for vApp import into resource pool (val){ [ 1092.580810] env[62383]: value = "resgroup-9" [ 1092.580810] env[62383]: _type = "ResourcePool" [ 1092.580810] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1092.581195] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the lease: (returnval){ [ 1092.581195] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5248acef-564e-2bd9-37ad-17fb7e089014" [ 1092.581195] env[62383]: _type = "HttpNfcLease" [ 1092.581195] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1092.593302] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1092.593302] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5248acef-564e-2bd9-37ad-17fb7e089014" [ 1092.593302] env[62383]: _type = "HttpNfcLease" [ 1092.593302] env[62383]: } is initializing. 
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1092.787036] env[62383]: DEBUG nova.scheduler.client.report [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 152 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1092.787036] env[62383]: DEBUG nova.compute.provider_tree [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 152 to 153 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1092.787230] env[62383]: DEBUG nova.compute.provider_tree [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1092.963136] env[62383]: DEBUG oslo_vmware.api [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452349, 'name': PowerOnVM_Task, 'duration_secs': 0.479707} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.966294] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1092.966584] env[62383]: INFO nova.compute.manager [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Took 6.30 seconds to spawn the instance on the hypervisor. 
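The 409 above, followed by the inventory/aggregate/trait refresh and the provider generation moving from 152 to 153, is Placement's optimistic concurrency control: every inventory write must carry the provider generation the client last read, and a stale generation is rejected as placement.concurrent_update, so the client re-reads and retries. A minimal sketch of that retry loop against the Placement HTTP API; the endpoint, token, and helper name are assumptions for illustration, not Nova's report client:

    import requests

    PLACEMENT = "http://placement.example:8778"           # assumed endpoint
    HEADERS = {"x-auth-token": "ADMIN_TOKEN",              # assumed credentials
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(rp_uuid, inventories, retries=3):
        """PUT inventories, re-reading the provider generation after each 409."""
        url = "%s/resource_providers/%s/inventories" % (PLACEMENT, rp_uuid)
        for _ in range(retries):
            # Read the provider's current generation before writing.
            current = requests.get(url, headers=HEADERS)
            current.raise_for_status()
            gen = current.json()["resource_provider_generation"]
            resp = requests.put(url, headers=HEADERS,
                                json={"resource_provider_generation": gen,
                                      "inventories": inventories})
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # placement.concurrent_update: another writer bumped the generation;
            # loop to re-read and retry, as the report client does above.
        raise RuntimeError("inventory update kept conflicting for %s" % rp_uuid)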
[ 1092.966835] env[62383]: DEBUG nova.compute.manager [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1092.967841] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff0e2ed-6a8a-4549-be5a-5583ed5be3ce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.089888] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1093.089888] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5248acef-564e-2bd9-37ad-17fb7e089014" [ 1093.089888] env[62383]: _type = "HttpNfcLease" [ 1093.089888] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1093.090282] env[62383]: DEBUG oslo_vmware.rw_handles [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1093.090282] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5248acef-564e-2bd9-37ad-17fb7e089014" [ 1093.090282] env[62383]: _type = "HttpNfcLease" [ 1093.090282] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1093.090863] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8dc78a-7825-48c6-8f5e-4cacecef87c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.098423] env[62383]: DEBUG oslo_vmware.rw_handles [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5228d1c0-9cff-b0eb-2183-5296d7671fc6/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1093.098603] env[62383]: DEBUG oslo_vmware.rw_handles [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5228d1c0-9cff-b0eb-2183-5296d7671fc6/disk-0.vmdk. 
{{(pid=62383) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1093.163037] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b0ea01f7-bc75-4256-9deb-7aacadc0bd1e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.292112] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.167s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.292490] env[62383]: INFO nova.compute.manager [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Migrating [ 1093.298694] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.819s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.298870] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.299064] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1093.299375] env[62383]: DEBUG oslo_concurrency.lockutils [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.557s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.299590] env[62383]: DEBUG nova.objects.instance [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lazy-loading 'resources' on Instance uuid 8bd05dac-7aa2-44c5-8752-6045c01d213d {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.303686] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c09ef75-e605-452b-9c55-cd9a433aee56 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.318248] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b8ea1d-aa12-4b19-b31f-e6e6de370bca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.334074] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-df55ff52-8b0c-4152-bd90-a49526994ce3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.340972] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181fc09f-59bd-4e9a-9372-553e08a34f06 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.373032] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179944MB free_disk=146GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1093.373205] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.472910] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f3775b57-ea26-4e20-b778-1824ac9e10db tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "981aa014-4861-4ab3-94e3-c113eec9bf29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.832s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.486217] env[62383]: INFO nova.compute.manager [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Took 12.15 seconds to build instance. 
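The repeated Task poll records ("progress is N%", "completed successfully") come from oslo.vmware, which turns vCenter's asynchronous *_Task methods into blocking calls. A minimal sketch of that pattern, assuming a reachable vCenter; the connection details and the vm_ref argument are placeholders, not values from this log:

    from oslo_vmware import api as vmware_api

    def power_on(session, vm_ref):
        # invoke_api issues the SOAP call and returns the Task managed object;
        # wait_for_task polls it (emitting the "progress is N%" DEBUG lines)
        # until it succeeds, and raises if the task ends in an error state.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        return session.wait_for_task(task)

    # Positional arguments: host, username, password, api_retry_count,
    # task_poll_interval (all placeholder values here).
    session = vmware_api.VMwareAPISession("vcenter.example", "user", "secret",
                                          10, 0.5)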
[ 1093.535814] env[62383]: DEBUG oslo_concurrency.lockutils [None req-613fc353-07d3-4d76-9674-9a24b6b1e543 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.536215] env[62383]: DEBUG oslo_concurrency.lockutils [None req-613fc353-07d3-4d76-9674-9a24b6b1e543 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.536592] env[62383]: DEBUG nova.objects.instance [None req-613fc353-07d3-4d76-9674-9a24b6b1e543 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'flavor' on Instance uuid 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.814693] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.814947] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.815412] env[62383]: DEBUG nova.network.neutron [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1093.969342] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5e3d6d-ad49-4aa1-9bac-98d059b3ff99 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.980823] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b279b9f8-739f-4643-a974-3df4bf522850 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.988359] env[62383]: DEBUG oslo_concurrency.lockutils [None req-583cee76-9167-4fe0-b05c-68e36e08393a tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.654s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.021602] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3253c04b-4ca1-430a-9c2b-3ae48a187e7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.030690] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b1f6b7-9382-42c4-ac96-366f2f6595dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.040154] env[62383]: DEBUG nova.objects.instance [None req-613fc353-07d3-4d76-9674-9a24b6b1e543 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'pci_requests' on Instance uuid 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.049844] env[62383]: DEBUG nova.compute.provider_tree [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.352290] env[62383]: DEBUG oslo_vmware.rw_handles [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Completed reading data from the image iterator. {{(pid=62383) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1094.352584] env[62383]: DEBUG oslo_vmware.rw_handles [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5228d1c0-9cff-b0eb-2183-5296d7671fc6/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1094.353811] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b679815-b285-4aac-96d8-8310956d8b34 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.362246] env[62383]: DEBUG oslo_vmware.rw_handles [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5228d1c0-9cff-b0eb-2183-5296d7671fc6/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1094.362246] env[62383]: DEBUG oslo_vmware.rw_handles [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5228d1c0-9cff-b0eb-2183-5296d7671fc6/disk-0.vmdk. 
{{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1094.362246] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-99238e06-dcdc-4ce4-8d71-c249fd0bdf83 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.552754] env[62383]: DEBUG nova.scheduler.client.report [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1094.556168] env[62383]: DEBUG nova.objects.base [None req-613fc353-07d3-4d76-9674-9a24b6b1e543 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Object Instance<8d9d6f3b-aef7-478a-a43e-3b621f1b3845> lazy-loaded attributes: flavor,pci_requests {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1094.556378] env[62383]: DEBUG nova.network.neutron [None req-613fc353-07d3-4d76-9674-9a24b6b1e543 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1094.558959] env[62383]: DEBUG nova.network.neutron [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [{"id": "5ba29557-a079-4404-9449-eeff24a0a3e4", "address": "fa:16:3e:fd:2c:1c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ba29557-a0", "ovs_interfaceid": "5ba29557-a079-4404-9449-eeff24a0a3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.597070] env[62383]: DEBUG 
oslo_vmware.rw_handles [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5228d1c0-9cff-b0eb-2183-5296d7671fc6/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1094.597275] env[62383]: INFO nova.virt.vmwareapi.images [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Downloaded image file data 668a5589-791f-4966-838e-a17995d2fb51 [ 1094.598142] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b168dd6d-edac-4ccf-9dc4-3b56187da4b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.614751] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5035f9b4-e69f-4c20-9a88-4ab1f9779541 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.658942] env[62383]: DEBUG oslo_concurrency.lockutils [None req-613fc353-07d3-4d76-9674-9a24b6b1e543 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.122s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.669746] env[62383]: INFO nova.virt.vmwareapi.images [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] The imported VM was unregistered [ 1094.672844] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Caching image {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1094.672844] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating directory with path [datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1094.673245] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a387a52-55ab-4bfc-889d-e6ebb6c3f5da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.687926] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Created directory with path [datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1094.688134] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d 
tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0/OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0.vmdk to [datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51/668a5589-791f-4966-838e-a17995d2fb51.vmdk. {{(pid=62383) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1094.688386] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-f7d8a325-5a52-4e80-964e-4e2346660632 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.696161] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1094.696161] env[62383]: value = "task-2452352" [ 1094.696161] env[62383]: _type = "Task" [ 1094.696161] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.704358] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452352, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.836251] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "7cece477-9444-4ffd-88a0-d6c821cb7275" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.836559] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "7cece477-9444-4ffd-88a0-d6c821cb7275" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.007777] env[62383]: DEBUG oslo_concurrency.lockutils [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquiring lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.008063] env[62383]: DEBUG oslo_concurrency.lockutils [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.008355] env[62383]: DEBUG oslo_concurrency.lockutils [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 
tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquiring lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.008550] env[62383]: DEBUG oslo_concurrency.lockutils [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.008757] env[62383]: DEBUG oslo_concurrency.lockutils [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.010880] env[62383]: INFO nova.compute.manager [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Terminating instance [ 1095.061820] env[62383]: DEBUG oslo_concurrency.lockutils [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.762s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.064173] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.065755] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.693s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.085066] env[62383]: INFO nova.scheduler.client.report [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Deleted allocations for instance 8bd05dac-7aa2-44c5-8752-6045c01d213d [ 1095.206227] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452352, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.339399] env[62383]: DEBUG nova.compute.manager [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1095.515140] env[62383]: DEBUG nova.compute.manager [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1095.516322] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.516376] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20560fb-6bd8-4567-9d60-2fe1944a4694 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.526762] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.527030] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdd2d0fe-7472-4dd6-8f8d-9b5e88667e7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.538591] env[62383]: DEBUG oslo_vmware.api [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for the task: (returnval){ [ 1095.538591] env[62383]: value = "task-2452353" [ 1095.538591] env[62383]: _type = "Task" [ 1095.538591] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.551877] env[62383]: DEBUG oslo_vmware.api [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452353, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.592034] env[62383]: DEBUG oslo_concurrency.lockutils [None req-51344ee4-35fb-46b7-9b26-4400e1758c34 tempest-ServersV294TestFqdnHostnames-1603620919 tempest-ServersV294TestFqdnHostnames-1603620919-project-member] Lock "8bd05dac-7aa2-44c5-8752-6045c01d213d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.433s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.709212] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452352, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.863797] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.052271] env[62383]: DEBUG oslo_vmware.api [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.082053] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Applying migration context for instance 2f028680-8db4-474a-8f24-880c4702877b as it has an incoming, in-progress migration 227d47a8-0fa8-4aa6-b7e6-671721533413. Migration status is migrating {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1096.083163] env[62383]: INFO nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating resource usage from migration 227d47a8-0fa8-4aa6-b7e6-671721533413 [ 1096.109493] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1b025655-acad-4b70-9e1a-489683cafb7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1096.109493] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 9f8e346e-815c-492d-84a9-00ebdca3bcc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1096.109493] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1096.109493] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 0392d059-57ea-49fb-84d2-b71cbca840db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1096.109820] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance de24aca8-30fc-453e-b192-b6bb115876ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1096.109820] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance ec7c648d-10b0-480a-a5f0-4dab08d0049e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1096.109937] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 91ce6946-0c8a-4b59-bbe0-54a566a57cdb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1096.110290] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Migration 227d47a8-0fa8-4aa6-b7e6-671721533413 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1096.110290] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 2f028680-8db4-474a-8f24-880c4702877b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1096.209314] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452352, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.553399] env[62383]: DEBUG oslo_vmware.api [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452353, 'name': PowerOffVM_Task, 'duration_secs': 0.886406} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.554072] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.554072] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.554343] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32415cf3-a6c3-4a4c-b69e-c9cc4e15598d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.582201] env[62383]: DEBUG oslo_concurrency.lockutils [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.583034] env[62383]: DEBUG oslo_concurrency.lockutils [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.583034] env[62383]: DEBUG nova.objects.instance [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'flavor' on Instance uuid 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.590310] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d0e4e1-8f5e-4f70-ae15-4e273c718c27 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.610273] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance '2f028680-8db4-474a-8f24-880c4702877b' progress to 0 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1096.614525] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 7cece477-9444-4ffd-88a0-d6c821cb7275 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1096.614811] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1096.615401] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1096.710669] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452352, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.766855] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abc290f-45c8-4f88-8496-c4938053916c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.777920] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7035558-4c65-4a83-9cc0-c8ec13570940 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.811849] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae516a3-981c-4b14-be50-873ce909241c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.820765] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2979f8dd-b24e-4ff2-ab32-3e39013dd5b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.835918] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1096.964429] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.964774] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Deleting contents of the VM from datastore 
datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.965012] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Deleting the datastore file [datastore2] 91ce6946-0c8a-4b59-bbe0-54a566a57cdb {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.965302] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00a511c9-f520-41cd-aa73-bcddbd00140c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.975234] env[62383]: DEBUG oslo_vmware.api [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for the task: (returnval){ [ 1096.975234] env[62383]: value = "task-2452355" [ 1096.975234] env[62383]: _type = "Task" [ 1096.975234] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.988283] env[62383]: DEBUG oslo_vmware.api [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.119787] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.120158] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-521f6035-57d2-4dd8-8f35-efd00fd5e59f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.133971] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1097.133971] env[62383]: value = "task-2452356" [ 1097.133971] env[62383]: _type = "Task" [ 1097.133971] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.147992] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452356, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.204342] env[62383]: DEBUG nova.objects.instance [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'pci_requests' on Instance uuid 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1097.216260] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452352, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.360112] env[62383]: ERROR nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [req-118ec12b-debb-4272-a13b-be560fd82c0b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-118ec12b-debb-4272-a13b-be560fd82c0b"}]} [ 1097.379755] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1097.398998] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1097.399330] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1097.411973] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Refreshing aggregate associations for resource provider 
60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1097.434357] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1097.490203] env[62383]: DEBUG oslo_vmware.api [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.623389] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2fb55c-558d-4ad2-b329-d854ea320b7f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.641248] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23aef9c8-c988-4949-a55e-ceb6fbb0b158 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.652535] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452356, 'name': PowerOffVM_Task, 'duration_secs': 0.268327} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.680758] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1097.680995] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance '2f028680-8db4-474a-8f24-880c4702877b' progress to 17 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1097.685903] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cee0c8e-f749-4f6f-aa30-2dc7e500f51f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.696998] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e79b14c-bafc-4510-a221-0cd467d8c402 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.708010] env[62383]: DEBUG nova.objects.base [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Object Instance<8d9d6f3b-aef7-478a-a43e-3b621f1b3845> lazy-loaded attributes: flavor,pci_requests {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1097.708010] env[62383]: DEBUG nova.network.neutron [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1097.720657] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452352, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.979703} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.721122] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1097.723118] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0/OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0.vmdk to [datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51/668a5589-791f-4966-838e-a17995d2fb51.vmdk. [ 1097.723118] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Cleaning up location [datastore2] OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1097.723599] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_1675b62e-aa5b-4d1d-8a4a-968256382fb0 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1097.723730] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4741ab7-2278-4651-91fd-06e25fe8bd49 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.735166] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1097.735166] env[62383]: value = "task-2452357" [ 1097.735166] env[62383]: _type = "Task" [ 1097.735166] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.748996] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452357, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.781109] env[62383]: DEBUG nova.policy [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7825487398fc47b5aa690bed357e4448', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba559fb5da01474791c2408ca92bbff6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1097.989451] env[62383]: DEBUG oslo_vmware.api [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Task: {'id': task-2452355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.746534} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.989451] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.989451] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.989685] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.989804] env[62383]: INFO nova.compute.manager [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Took 2.47 seconds to destroy the instance on the hypervisor. [ 1097.990039] env[62383]: DEBUG oslo.service.loopingcall [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1097.990251] env[62383]: DEBUG nova.compute.manager [-] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1097.990348] env[62383]: DEBUG nova.network.neutron [-] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1098.146031] env[62383]: DEBUG nova.network.neutron [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Successfully created port: fb190575-295c-42fa-b8ba-fc6f19ccfa01 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1098.191067] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.191362] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.191992] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1098.191992] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.191992] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.191992] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.192218] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 
tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1098.192533] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.192632] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.192716] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.192891] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.198713] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da8ffef1-7f55-401e-82ae-5f41cfb46ca8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.217037] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1098.217037] env[62383]: value = "task-2452358" [ 1098.217037] env[62383]: _type = "Task" [ 1098.217037] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.232370] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452358, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.255048] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452357, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062078} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.255048] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1098.255048] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51/668a5589-791f-4966-838e-a17995d2fb51.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.255048] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51/668a5589-791f-4966-838e-a17995d2fb51.vmdk to [datastore2] ec7c648d-10b0-480a-a5f0-4dab08d0049e/ec7c648d-10b0-480a-a5f0-4dab08d0049e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1098.255321] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6cf4fb2-7259-4fb0-8953-19486ed4e9bd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.264445] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1098.264445] env[62383]: value = "task-2452359" [ 1098.264445] env[62383]: _type = "Task" [ 1098.264445] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.270606] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 154 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1098.270606] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 154 to 155 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1098.270606] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1098.278242] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452359, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.378210] env[62383]: DEBUG nova.compute.manager [req-77e8ea19-b861-4caf-9aae-94e35d280df8 req-7aa9d01d-91b7-4785-ab87-a4683a21e834 service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Received event network-vif-deleted-92d87ef0-de90-43ef-aef1-b558be3349b9 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1098.378270] env[62383]: INFO nova.compute.manager [req-77e8ea19-b861-4caf-9aae-94e35d280df8 req-7aa9d01d-91b7-4785-ab87-a4683a21e834 service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Neutron deleted interface 92d87ef0-de90-43ef-aef1-b558be3349b9; detaching it from the instance and deleting it from the info cache [ 1098.378825] env[62383]: DEBUG nova.network.neutron [req-77e8ea19-b861-4caf-9aae-94e35d280df8 req-7aa9d01d-91b7-4785-ab87-a4683a21e834 service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.687577] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.687892] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.727899] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452358, 'name': ReconfigVM_Task, 'duration_secs': 0.470879} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.728232] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance '2f028680-8db4-474a-8f24-880c4702877b' progress to 33 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1098.748790] env[62383]: DEBUG nova.network.neutron [-] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.774631] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452359, 'name': CopyVirtualDisk_Task} progress is 9%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.779972] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1098.780112] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.714s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.780386] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.917s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.781890] env[62383]: INFO nova.compute.claims [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1098.784926] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.784926] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Cleaning up deleted instances {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1098.880922] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39062522-0f8a-4f7c-a9fc-b81ed09b0ad4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.892892] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6a3301-c565-4699-a51a-815c20479832 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.936028] env[62383]: DEBUG nova.compute.manager [req-77e8ea19-b861-4caf-9aae-94e35d280df8 req-7aa9d01d-91b7-4785-ab87-a4683a21e834 service nova] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Detach interface failed, port_id=92d87ef0-de90-43ef-aef1-b558be3349b9, reason: Instance 91ce6946-0c8a-4b59-bbe0-54a566a57cdb could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1099.190959] env[62383]: INFO nova.compute.manager [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Detaching volume 656a8db8-6cf0-47a8-a1b4-ea050e96ecc1 [ 1099.230463] env[62383]: INFO nova.virt.block_device [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Attempting to driver detach volume 656a8db8-6cf0-47a8-a1b4-ea050e96ecc1 from mountpoint /dev/sdb [ 1099.230710] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1099.230898] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496576', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'name': 'volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9f8e346e-815c-492d-84a9-00ebdca3bcc3', 'attached_at': '', 'detached_at': '', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'serial': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1099.231905] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f0a990-b2eb-40d9-8983-e818b91c750c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.236981] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1099.237236] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1099.237393] env[62383]: DEBUG nova.virt.hardware [None 
req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1099.237574] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1099.237719] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1099.237866] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1099.238080] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1099.238241] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1099.238407] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1099.238569] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1099.238743] env[62383]: DEBUG nova.virt.hardware [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1099.244863] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Reconfiguring VM instance instance-0000003d to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1099.244863] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b61ac75-d51f-48b5-bb10-5b36228383be {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.257908] env[62383]: INFO nova.compute.manager [-] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Took 1.27 seconds to deallocate network for instance. [ 1099.284663] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c394f73-9b8a-43b6-a824-5de1e24dbf5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.287571] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1099.287571] env[62383]: value = "task-2452360" [ 1099.287571] env[62383]: _type = "Task" [ 1099.287571] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.304777] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] There are 63 instances to clean {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1099.304983] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 690dca62-cafb-40f7-92f0-9bbfde3467b6] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1099.317669] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452359, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.321391] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a62c933-60e5-4765-8ec6-4ecb78dc0783 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.324100] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452360, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.345057] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e25c865-bef9-4a4c-9b2c-e0bbe5a268d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.365707] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] The volume has not been displaced from its original location: [datastore2] volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1/volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1099.371666] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfiguring VM instance instance-0000005c to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1099.372074] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-003d5c10-a5a5-496c-aa0d-1a6093ffcf3d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.392064] env[62383]: DEBUG oslo_vmware.api [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1099.392064] env[62383]: value = "task-2452361" [ 1099.392064] env[62383]: _type = "Task" [ 1099.392064] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.402369] env[62383]: DEBUG oslo_vmware.api [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452361, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.736574] env[62383]: DEBUG nova.compute.manager [req-d94c148b-028d-4527-8864-dbb68368363b req-65260389-7b85-4720-9b3b-094c07e16d79 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received event network-vif-plugged-fb190575-295c-42fa-b8ba-fc6f19ccfa01 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1099.736905] env[62383]: DEBUG oslo_concurrency.lockutils [req-d94c148b-028d-4527-8864-dbb68368363b req-65260389-7b85-4720-9b3b-094c07e16d79 service nova] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.737644] env[62383]: DEBUG oslo_concurrency.lockutils [req-d94c148b-028d-4527-8864-dbb68368363b req-65260389-7b85-4720-9b3b-094c07e16d79 service nova] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.737982] env[62383]: DEBUG oslo_concurrency.lockutils [req-d94c148b-028d-4527-8864-dbb68368363b req-65260389-7b85-4720-9b3b-094c07e16d79 service nova] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.738202] env[62383]: DEBUG nova.compute.manager [req-d94c148b-028d-4527-8864-dbb68368363b req-65260389-7b85-4720-9b3b-094c07e16d79 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] No waiting events found dispatching network-vif-plugged-fb190575-295c-42fa-b8ba-fc6f19ccfa01 {{(pid=62383) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1099.738371] env[62383]: WARNING nova.compute.manager [req-d94c148b-028d-4527-8864-dbb68368363b req-65260389-7b85-4720-9b3b-094c07e16d79 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received unexpected event network-vif-plugged-fb190575-295c-42fa-b8ba-fc6f19ccfa01 for instance with vm_state active and task_state None. [ 1099.781612] env[62383]: DEBUG oslo_concurrency.lockutils [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.791684] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452359, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.814801] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8f639983-e7ef-4a63-94b6-5c5256015937] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1099.823729] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452360, 'name': ReconfigVM_Task, 'duration_secs': 0.252229} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.824443] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Reconfigured VM instance instance-0000003d to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1099.825837] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2112e3-dbd5-49e1-a8d8-4500d8c6f760 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.851477] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 2f028680-8db4-474a-8f24-880c4702877b/2f028680-8db4-474a-8f24-880c4702877b.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.855764] env[62383]: DEBUG nova.network.neutron [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Successfully updated port: fb190575-295c-42fa-b8ba-fc6f19ccfa01 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1099.856962] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-c7844a63-1c14-4455-84d8-1fb61b2f000e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.873564] env[62383]: DEBUG oslo_concurrency.lockutils [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.873763] env[62383]: DEBUG oslo_concurrency.lockutils [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.873942] env[62383]: DEBUG nova.network.neutron [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1099.876387] env[62383]: DEBUG nova.compute.manager [req-fc94fc93-ee30-43d2-b195-77a86676e35e req-a1742216-6411-493c-84a9-3ce55051437a service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received event network-changed-fb190575-295c-42fa-b8ba-fc6f19ccfa01 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1099.876572] env[62383]: DEBUG nova.compute.manager [req-fc94fc93-ee30-43d2-b195-77a86676e35e req-a1742216-6411-493c-84a9-3ce55051437a service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Refreshing instance network info cache due to event network-changed-fb190575-295c-42fa-b8ba-fc6f19ccfa01. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1099.876755] env[62383]: DEBUG oslo_concurrency.lockutils [req-fc94fc93-ee30-43d2-b195-77a86676e35e req-a1742216-6411-493c-84a9-3ce55051437a service nova] Acquiring lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.886896] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1099.886896] env[62383]: value = "task-2452362" [ 1099.886896] env[62383]: _type = "Task" [ 1099.886896] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.899531] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452362, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.910954] env[62383]: DEBUG oslo_vmware.api [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452361, 'name': ReconfigVM_Task, 'duration_secs': 0.476233} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.911264] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Reconfigured VM instance instance-0000005c to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1099.919069] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6257fe69-072f-4df9-a94c-84d0eee7c817 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.938461] env[62383]: DEBUG oslo_vmware.api [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1099.938461] env[62383]: value = "task-2452363" [ 1099.938461] env[62383]: _type = "Task" [ 1099.938461] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.953432] env[62383]: DEBUG oslo_vmware.api [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452363, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.020788] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faadda4c-b155-47cb-9e72-908573cafd48 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.032238] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82802b3c-cd27-48dd-97ab-8b38b77f7167 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.068602] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c8eb68-5804-40b7-b9ce-3964e9cf7ae5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.079088] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9378403-5222-4575-995b-e0170173e289 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.095708] env[62383]: DEBUG nova.compute.provider_tree [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.292745] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452359, 'name': CopyVirtualDisk_Task} progress is 71%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.318545] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 20861554-890b-4ad3-a73f-0c825a79bbf1] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1100.400016] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452362, 'name': ReconfigVM_Task, 'duration_secs': 0.46631} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.400321] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 2f028680-8db4-474a-8f24-880c4702877b/2f028680-8db4-474a-8f24-880c4702877b.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1100.400606] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance '2f028680-8db4-474a-8f24-880c4702877b' progress to 50 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1100.413931] env[62383]: WARNING nova.network.neutron [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] ce80ab32-a193-40db-be36-b8085e20a4c5 already exists in list: networks containing: ['ce80ab32-a193-40db-be36-b8085e20a4c5']. ignoring it [ 1100.451912] env[62383]: DEBUG oslo_vmware.api [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452363, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.598762] env[62383]: DEBUG nova.scheduler.client.report [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1100.786985] env[62383]: DEBUG nova.network.neutron [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fb190575-295c-42fa-b8ba-fc6f19ccfa01", "address": "fa:16:3e:a3:d3:c3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb190575-29", "ovs_interfaceid": "fb190575-295c-42fa-b8ba-fc6f19ccfa01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.797274] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452359, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.821778] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8c1b4a9b-6f3e-4d26-b5f2-d02d8c3f1270] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1100.907969] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a68f1c-fd27-470f-9aeb-77cf069c01f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.929772] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-308bb262-806d-409a-881a-bd7d68d66fa9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.949312] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance '2f028680-8db4-474a-8f24-880c4702877b' progress to 67 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1100.962251] env[62383]: DEBUG oslo_vmware.api [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452363, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.104029] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.104525] env[62383]: DEBUG nova.compute.manager [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1101.107133] env[62383]: DEBUG oslo_concurrency.lockutils [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.326s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.107365] env[62383]: DEBUG nova.objects.instance [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lazy-loading 'resources' on Instance uuid 91ce6946-0c8a-4b59-bbe0-54a566a57cdb {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.292879] env[62383]: DEBUG oslo_concurrency.lockutils [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.293526] env[62383]: DEBUG oslo_concurrency.lockutils [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1101.293686] env[62383]: DEBUG oslo_concurrency.lockutils [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.293946] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452359, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.667005} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.294182] env[62383]: DEBUG oslo_concurrency.lockutils [req-fc94fc93-ee30-43d2-b195-77a86676e35e req-a1742216-6411-493c-84a9-3ce55051437a service nova] Acquired lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.294362] env[62383]: DEBUG nova.network.neutron [req-fc94fc93-ee30-43d2-b195-77a86676e35e req-a1742216-6411-493c-84a9-3ce55051437a service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Refreshing network info cache for port fb190575-295c-42fa-b8ba-fc6f19ccfa01 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1101.295897] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78bcfc01-1408-49d6-9971-1bb561ff0f40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.298622] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/668a5589-791f-4966-838e-a17995d2fb51/668a5589-791f-4966-838e-a17995d2fb51.vmdk to [datastore2] ec7c648d-10b0-480a-a5f0-4dab08d0049e/ec7c648d-10b0-480a-a5f0-4dab08d0049e.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1101.299545] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b509e5-47cb-4eeb-a19c-f69423d27916 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.323040] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] ec7c648d-10b0-480a-a5f0-4dab08d0049e/ec7c648d-10b0-480a-a5f0-4dab08d0049e.vmdk or device None with type streamOptimized {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.334070] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ae854f2-c6b7-4e59-be99-af1b37aafd70 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.347538] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 4d929f43-cea2-41a0-9822-180a2647be2c] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1101.349642] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1101.349863] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1101.350032] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1101.350222] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1101.350372] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1101.350521] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1101.350776] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1101.350996] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1101.351225] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1101.351450] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1101.351643] env[62383]: DEBUG nova.virt.hardware [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Sorted desired 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1101.358600] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Reconfiguring VM to attach interface {{(pid=62383) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1101.359360] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4d7af5c-0fdf-4ca4-b4db-66561b1029c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.377830] env[62383]: DEBUG oslo_vmware.api [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1101.377830] env[62383]: value = "task-2452365" [ 1101.377830] env[62383]: _type = "Task" [ 1101.377830] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.382259] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1101.382259] env[62383]: value = "task-2452364" [ 1101.382259] env[62383]: _type = "Task" [ 1101.382259] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.391902] env[62383]: DEBUG oslo_vmware.api [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452365, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.394863] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452364, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.462113] env[62383]: DEBUG oslo_vmware.api [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452363, 'name': ReconfigVM_Task, 'duration_secs': 1.233409} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.462441] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496576', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'name': 'volume-656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9f8e346e-815c-492d-84a9-00ebdca3bcc3', 'attached_at': '', 'detached_at': '', 'volume_id': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1', 'serial': '656a8db8-6cf0-47a8-a1b4-ea050e96ecc1'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1101.495906] env[62383]: DEBUG nova.network.neutron [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Port 5ba29557-a079-4404-9449-eeff24a0a3e4 binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1101.611773] env[62383]: DEBUG nova.compute.utils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1101.617939] env[62383]: DEBUG nova.compute.manager [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1101.618138] env[62383]: DEBUG nova.network.neutron [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1101.655285] env[62383]: DEBUG nova.policy [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc391aae95a8405bab7801175514ac8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c15955328966463fa09401a270d95fe0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1101.755243] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abbc227d-db6e-4dbf-ba8c-27ddbce55524 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.763823] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2532ca0-4e0f-4ef0-9f14-9d0953e54270 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.796403] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c898e67-645a-41e2-aed8-530af639b095 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.804900] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320e0a56-40b2-4fe5-828a-8750128fa49f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.819563] env[62383]: DEBUG nova.compute.provider_tree [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.860381] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 1919c4ae-0e30-42bf-b851-2e6c24ab1ae3] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1101.894608] env[62383]: DEBUG oslo_vmware.api [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452365, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.900390] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452364, 'name': ReconfigVM_Task, 'duration_secs': 0.334096} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.900390] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Reconfigured VM instance instance-00000062 to attach disk [datastore2] ec7c648d-10b0-480a-a5f0-4dab08d0049e/ec7c648d-10b0-480a-a5f0-4dab08d0049e.vmdk or device None with type streamOptimized {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1101.901015] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_format': None, 'encryption_options': None, 'size': 0, 'device_type': 'disk', 'encryption_secret_uuid': None, 'device_name': '/dev/sda', 'encrypted': False, 'guest_format': None, 'boot_index': 0, 'disk_bus': None, 'image_id': 'cac3b430-a1d5-4ad1-92ec-34c2261779a8'}], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '16668e81-9451-4649-b896-c3d3605b070f', 'device_type': None, 'delete_on_termination': False, 'mount_device': '/dev/sdb', 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496589', 'volume_id': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7', 'name': 'volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'ec7c648d-10b0-480a-a5f0-4dab08d0049e', 'attached_at': '', 'detached_at': '', 'volume_id': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7', 'serial': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7'}, 'boot_index': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62383) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1101.901015] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Volume attach. 
Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1101.901015] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496589', 'volume_id': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7', 'name': 'volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'ec7c648d-10b0-480a-a5f0-4dab08d0049e', 'attached_at': '', 'detached_at': '', 'volume_id': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7', 'serial': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1101.901712] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc95db6-1a1c-4582-ad8f-bad3e6dc283e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.921104] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7935347-648c-460b-a700-9579e248fb91 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.949164] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7/volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.949164] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fb908e8-b192-4108-abd4-2b3d1f809426 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.972043] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1101.972043] env[62383]: value = "task-2452366" [ 1101.972043] env[62383]: _type = "Task" [ 1101.972043] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.976424] env[62383]: DEBUG nova.network.neutron [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Successfully created port: ba594434-1ccb-410e-9307-5e4deb6c17a8 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1101.984491] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452366, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.022739] env[62383]: DEBUG nova.objects.instance [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lazy-loading 'flavor' on Instance uuid 9f8e346e-815c-492d-84a9-00ebdca3bcc3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.038283] env[62383]: DEBUG nova.network.neutron [req-fc94fc93-ee30-43d2-b195-77a86676e35e req-a1742216-6411-493c-84a9-3ce55051437a service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updated VIF entry in instance network info cache for port fb190575-295c-42fa-b8ba-fc6f19ccfa01. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1102.038832] env[62383]: DEBUG nova.network.neutron [req-fc94fc93-ee30-43d2-b195-77a86676e35e req-a1742216-6411-493c-84a9-3ce55051437a service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fb190575-295c-42fa-b8ba-fc6f19ccfa01", "address": "fa:16:3e:a3:d3:c3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb190575-29", "ovs_interfaceid": "fb190575-295c-42fa-b8ba-fc6f19ccfa01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.119145] env[62383]: DEBUG nova.compute.manager [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1102.322921] env[62383]: DEBUG nova.scheduler.client.report [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1102.363958] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: b451f9ad-cda6-49a3-801e-acbf121e9552] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1102.389254] env[62383]: DEBUG oslo_vmware.api [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452365, 'name': ReconfigVM_Task, 'duration_secs': 0.578575} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.389769] env[62383]: DEBUG oslo_concurrency.lockutils [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.389981] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Reconfigured VM to attach interface {{(pid=62383) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1102.482245] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452366, 'name': ReconfigVM_Task, 'duration_secs': 0.326439} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.482586] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7/volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.487181] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e14ce4c-57c7-47f9-9bae-418bd0a64a5d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.503063] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1102.503063] env[62383]: value = "task-2452367" [ 1102.503063] env[62383]: _type = "Task" [ 1102.503063] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.520958] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "2f028680-8db4-474a-8f24-880c4702877b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.521222] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.521429] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.527914] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452367, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.544747] env[62383]: DEBUG oslo_concurrency.lockutils [req-fc94fc93-ee30-43d2-b195-77a86676e35e req-a1742216-6411-493c-84a9-3ce55051437a service nova] Releasing lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.827953] env[62383]: DEBUG oslo_concurrency.lockutils [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.851734] env[62383]: INFO nova.scheduler.client.report [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Deleted allocations for instance 91ce6946-0c8a-4b59-bbe0-54a566a57cdb [ 1102.866670] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 3e868358-2aa2-4ddd-9c2e-16eb5c194bb7] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1102.894267] env[62383]: DEBUG oslo_concurrency.lockutils [None req-86969c93-69dc-49bc-a51f-3ce4c3aa8bd3 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.312s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.012368] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452367, 'name': ReconfigVM_Task, 'duration_secs': 0.153505} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.012729] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496589', 'volume_id': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7', 'name': 'volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'ec7c648d-10b0-480a-a5f0-4dab08d0049e', 'attached_at': '', 'detached_at': '', 'volume_id': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7', 'serial': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1103.013346] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-159f73e7-0e0d-4649-b32d-0c469a762bd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.021519] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1103.021519] env[62383]: value = "task-2452368" [ 1103.021519] env[62383]: _type = "Task" [ 1103.021519] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.029817] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452368, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.031871] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0031808f-b706-4616-9032-a669d54a49fc tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.344s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.128226] env[62383]: DEBUG nova.compute.manager [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1103.149801] env[62383]: DEBUG nova.virt.hardware [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1103.150060] env[62383]: DEBUG nova.virt.hardware [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1103.150229] env[62383]: DEBUG nova.virt.hardware [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1103.150416] env[62383]: DEBUG nova.virt.hardware [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1103.150563] env[62383]: DEBUG nova.virt.hardware [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1103.150711] env[62383]: DEBUG nova.virt.hardware [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1103.150912] env[62383]: DEBUG nova.virt.hardware [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1103.151084] env[62383]: DEBUG nova.virt.hardware [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1103.151256] env[62383]: DEBUG nova.virt.hardware [None 
req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1103.151437] env[62383]: DEBUG nova.virt.hardware [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1103.151618] env[62383]: DEBUG nova.virt.hardware [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1103.152488] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63d62bf-d6db-4b00-beba-0d15e8a093b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.160826] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01fadd4-4475-4051-902a-7a6af440ae05 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.360105] env[62383]: DEBUG oslo_concurrency.lockutils [None req-38349f63-1bb4-45d9-9b2e-42e4c5983279 tempest-ServerAddressesTestJSON-1289840214 tempest-ServerAddressesTestJSON-1289840214-project-member] Lock "91ce6946-0c8a-4b59-bbe0-54a566a57cdb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.352s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.369738] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 688b0afd-a6e1-4c3f-999d-5975371e888e] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1103.536731] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452368, 'name': Rename_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.539170] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "0392d059-57ea-49fb-84d2-b71cbca840db" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.540048] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "0392d059-57ea-49fb-84d2-b71cbca840db" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.540276] env[62383]: INFO nova.compute.manager [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Shelving [ 1103.566114] env[62383]: DEBUG nova.compute.manager [req-0303be8b-650c-4557-afee-7a77d77849a3 req-6c30cc8b-4803-456c-8a74-92902e968b96 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Received event network-vif-plugged-ba594434-1ccb-410e-9307-5e4deb6c17a8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1103.566356] env[62383]: DEBUG oslo_concurrency.lockutils [req-0303be8b-650c-4557-afee-7a77d77849a3 req-6c30cc8b-4803-456c-8a74-92902e968b96 service nova] Acquiring lock "7cece477-9444-4ffd-88a0-d6c821cb7275-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.566466] env[62383]: DEBUG oslo_concurrency.lockutils [req-0303be8b-650c-4557-afee-7a77d77849a3 req-6c30cc8b-4803-456c-8a74-92902e968b96 service nova] Lock "7cece477-9444-4ffd-88a0-d6c821cb7275-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.566616] env[62383]: DEBUG oslo_concurrency.lockutils [req-0303be8b-650c-4557-afee-7a77d77849a3 req-6c30cc8b-4803-456c-8a74-92902e968b96 service nova] Lock "7cece477-9444-4ffd-88a0-d6c821cb7275-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.566826] env[62383]: DEBUG nova.compute.manager [req-0303be8b-650c-4557-afee-7a77d77849a3 req-6c30cc8b-4803-456c-8a74-92902e968b96 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] No waiting events found dispatching network-vif-plugged-ba594434-1ccb-410e-9307-5e4deb6c17a8 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1103.566958] env[62383]: WARNING nova.compute.manager [req-0303be8b-650c-4557-afee-7a77d77849a3 req-6c30cc8b-4803-456c-8a74-92902e968b96 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Received unexpected event network-vif-plugged-ba594434-1ccb-410e-9307-5e4deb6c17a8 for instance with vm_state building and task_state 
spawning. [ 1103.586652] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1103.586843] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.587340] env[62383]: DEBUG nova.network.neutron [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1103.589645] env[62383]: DEBUG nova.network.neutron [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Successfully updated port: ba594434-1ccb-410e-9307-5e4deb6c17a8 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1103.872688] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8994780e-1b8f-4464-a303-a1e68206e770] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1104.032946] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452368, 'name': Rename_Task, 'duration_secs': 0.75092} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.033477] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1104.033839] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b918b52-c2f6-409c-b058-0b3584dd7a30 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.040860] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1104.040860] env[62383]: value = "task-2452369" [ 1104.040860] env[62383]: _type = "Task" [ 1104.040860] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.063120] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452369, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.096676] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "refresh_cache-7cece477-9444-4ffd-88a0-d6c821cb7275" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.096676] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "refresh_cache-7cece477-9444-4ffd-88a0-d6c821cb7275" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.096676] env[62383]: DEBUG nova.network.neutron [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1104.163358] env[62383]: DEBUG oslo_concurrency.lockutils [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.163835] env[62383]: DEBUG oslo_concurrency.lockutils [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.164177] env[62383]: DEBUG oslo_concurrency.lockutils [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.164470] env[62383]: DEBUG oslo_concurrency.lockutils [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.164772] env[62383]: DEBUG oslo_concurrency.lockutils [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 
tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1104.172721] env[62383]: INFO nova.compute.manager [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Terminating instance [ 1104.376542] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 4cfea58a-35cc-4e3f-8f39-0bc00968eb4d] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1104.413060] env[62383]: DEBUG nova.network.neutron [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [{"id": "5ba29557-a079-4404-9449-eeff24a0a3e4", "address": "fa:16:3e:fd:2c:1c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ba29557-a0", "ovs_interfaceid": "5ba29557-a079-4404-9449-eeff24a0a3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.554769] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.555582] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452369, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.555840] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-601a0fbe-cb2e-40e0-a566-c2752031504f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.563855] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1104.563855] env[62383]: value = "task-2452370" [ 1104.563855] env[62383]: _type = "Task" [ 1104.563855] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.572894] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452370, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.628513] env[62383]: DEBUG nova.network.neutron [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1104.677074] env[62383]: DEBUG nova.compute.manager [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1104.677315] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1104.678219] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925992d3-08bb-4069-8a89-524bba1af7c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.687285] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.687568] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-635dc4e5-fffc-401c-954e-b1c282e4b4b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.695220] env[62383]: DEBUG oslo_vmware.api [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1104.695220] env[62383]: value = "task-2452371" [ 1104.695220] env[62383]: _type = "Task" [ 1104.695220] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.708773] env[62383]: DEBUG oslo_vmware.api [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452371, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.769431] env[62383]: DEBUG nova.network.neutron [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Updating instance_info_cache with network_info: [{"id": "ba594434-1ccb-410e-9307-5e4deb6c17a8", "address": "fa:16:3e:cd:fb:59", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba594434-1c", "ovs_interfaceid": "ba594434-1ccb-410e-9307-5e4deb6c17a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.789090] env[62383]: DEBUG oslo_concurrency.lockutils [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-a4c18916-d567-413d-974a-6d623f258430" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.789090] env[62383]: DEBUG oslo_concurrency.lockutils [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-a4c18916-d567-413d-974a-6d623f258430" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1104.789243] env[62383]: DEBUG nova.objects.instance [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'flavor' on Instance uuid 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.879578] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 4e5bd3ee-605f-4770-b658-9cbc3d0010ab] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1104.915235] env[62383]: DEBUG oslo_concurrency.lockutils [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock 
"refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.053884] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452369, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.074113] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452370, 'name': PowerOffVM_Task, 'duration_secs': 0.292808} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.075027] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1105.075258] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609980ea-b6b8-47ab-b4f2-f9b501231ff5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.096198] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ed7751-c0fc-462a-9cbb-ae478b857c06 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.208985] env[62383]: DEBUG oslo_vmware.api [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452371, 'name': PowerOffVM_Task, 'duration_secs': 0.176077} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.209285] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1105.209463] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1105.209713] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6da87c5a-c179-4359-89ff-155956fe0310 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.272382] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "refresh_cache-7cece477-9444-4ffd-88a0-d6c821cb7275" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.272671] env[62383]: DEBUG nova.compute.manager [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Instance network_info: |[{"id": "ba594434-1ccb-410e-9307-5e4deb6c17a8", "address": "fa:16:3e:cd:fb:59", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba594434-1c", "ovs_interfaceid": "ba594434-1ccb-410e-9307-5e4deb6c17a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1105.273129] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:fb:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba594434-1ccb-410e-9307-5e4deb6c17a8', 'vif_model': 
'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1105.280902] env[62383]: DEBUG oslo.service.loopingcall [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.282353] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1105.282637] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.282833] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.283016] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleting the datastore file [datastore2] 9f8e346e-815c-492d-84a9-00ebdca3bcc3 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.283250] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-971b1da7-b14f-488a-a52c-fc45aa5f5b85 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.297211] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1a390cd-31aa-4842-94dd-d329a34ac350 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.310095] env[62383]: DEBUG oslo_vmware.api [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1105.310095] env[62383]: value = "task-2452373" [ 1105.310095] env[62383]: _type = "Task" [ 1105.310095] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.311492] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1105.311492] env[62383]: value = "task-2452374" [ 1105.311492] env[62383]: _type = "Task" [ 1105.311492] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.323347] env[62383]: DEBUG oslo_vmware.api [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452373, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.326505] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452374, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.383110] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 362da311-fa2b-435d-b972-155a3ac22cbb] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1105.450298] env[62383]: DEBUG nova.objects.instance [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'pci_requests' on Instance uuid 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1105.454730] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eeffeb0-a3c6-4cc5-90e9-9fea302d17b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.483520] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7285a2dd-d03c-4126-84d3-fbba5eeb5d7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.494047] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance '2f028680-8db4-474a-8f24-880c4702877b' progress to 83 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1105.560534] env[62383]: DEBUG oslo_vmware.api [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452369, 'name': PowerOnVM_Task, 'duration_secs': 1.127457} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.560866] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.610171] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1105.611361] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4ad3e03a-ae19-4758-8247-1e2171f78416 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.621400] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1105.621400] env[62383]: value = "task-2452375" [ 1105.621400] env[62383]: _type = "Task" [ 1105.621400] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.635774] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452375, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.687007] env[62383]: DEBUG nova.compute.manager [req-b3dba639-0403-4f8e-8d5d-c1d271f350bf req-5589fc26-e7b2-413f-b645-ff28ff2d5da7 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Received event network-changed-ba594434-1ccb-410e-9307-5e4deb6c17a8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1105.687225] env[62383]: DEBUG nova.compute.manager [req-b3dba639-0403-4f8e-8d5d-c1d271f350bf req-5589fc26-e7b2-413f-b645-ff28ff2d5da7 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Refreshing instance network info cache due to event network-changed-ba594434-1ccb-410e-9307-5e4deb6c17a8. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1105.687440] env[62383]: DEBUG oslo_concurrency.lockutils [req-b3dba639-0403-4f8e-8d5d-c1d271f350bf req-5589fc26-e7b2-413f-b645-ff28ff2d5da7 service nova] Acquiring lock "refresh_cache-7cece477-9444-4ffd-88a0-d6c821cb7275" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.687687] env[62383]: DEBUG oslo_concurrency.lockutils [req-b3dba639-0403-4f8e-8d5d-c1d271f350bf req-5589fc26-e7b2-413f-b645-ff28ff2d5da7 service nova] Acquired lock "refresh_cache-7cece477-9444-4ffd-88a0-d6c821cb7275" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.687765] env[62383]: DEBUG nova.network.neutron [req-b3dba639-0403-4f8e-8d5d-c1d271f350bf req-5589fc26-e7b2-413f-b645-ff28ff2d5da7 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Refreshing network info cache for port ba594434-1ccb-410e-9307-5e4deb6c17a8 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1105.693934] env[62383]: DEBUG nova.compute.manager [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1105.694834] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e6b8ec-85ad-40a3-9803-241c30c02c9b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.825157] env[62383]: DEBUG oslo_vmware.api [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150154} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.828278] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.828473] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1105.828693] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1105.828832] env[62383]: INFO nova.compute.manager [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1105.829098] env[62383]: DEBUG oslo.service.loopingcall [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.829241] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452374, 'name': CreateVM_Task, 'duration_secs': 0.347643} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.829430] env[62383]: DEBUG nova.compute.manager [-] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1105.829552] env[62383]: DEBUG nova.network.neutron [-] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1105.831060] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1105.831660] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.831911] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.832135] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1105.832709] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffada5c7-3883-4d36-ae46-21a1cce0a255 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.837326] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1105.837326] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52838cec-c885-74f5-c830-c6db9f086e87" [ 1105.837326] env[62383]: _type = "Task" [ 1105.837326] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.845784] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52838cec-c885-74f5-c830-c6db9f086e87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.886333] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 9d2e3772-e0b2-450a-9dc8-725c4a05cde4] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1105.953023] env[62383]: DEBUG nova.objects.base [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Object Instance<8d9d6f3b-aef7-478a-a43e-3b621f1b3845> lazy-loaded attributes: flavor,pci_requests {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1105.953265] env[62383]: DEBUG nova.network.neutron [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1105.999213] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.999556] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec64de8c-e746-437b-b1f6-f7aa6495592e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.008868] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1106.008868] env[62383]: value = "task-2452376" [ 1106.008868] env[62383]: _type = "Task" [ 1106.008868] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.018134] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452376, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.021544] env[62383]: DEBUG nova.policy [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7825487398fc47b5aa690bed357e4448', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba559fb5da01474791c2408ca92bbff6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1106.134620] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452375, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.214474] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7efe32be-f63b-45f3-83bf-62cded77837d tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 32.489s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.357157] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52838cec-c885-74f5-c830-c6db9f086e87, 'name': SearchDatastore_Task, 'duration_secs': 0.010546} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.357157] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.357426] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.357676] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.357856] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.358950] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1106.360627] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e14b2934-a995-4428-9f4e-74e82505123e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.379613] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1106.379860] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Folder [datastore2] devstack-image-cache_base created. 
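[editor's note] The "Acquiring/Acquired/Releasing lock" records around the devstack-image-cache_base path above show per-image serialization on the datastore cache. A minimal illustrative sketch of that pattern follows, using an oslo.concurrency named lock; ensure_cached_image, ds_browser.exists and fetch_image are hypothetical stand-ins, not Nova's actual vmops code.

    # Sketch only: serialize access to one cached image on the datastore.
    from oslo_concurrency import lockutils

    def ensure_cached_image(ds_browser, cache_path, fetch_image):
        # The lock name mirrors the datastore path, so concurrent spawns of
        # the same image share one critical section per cached item.
        with lockutils.lock(cache_path):
            if not ds_browser.exists(cache_path):   # hypothetical helper
                fetch_image(cache_path)             # download only on a cache miss
            return cache_path
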
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1106.381666] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-921a224f-969a-47e3-bcc7-5d538d4dbdae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.389666] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 4136466e-d9c6-448a-b392-415bb7c44a8d] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1106.397029] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1106.397029] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527ae19c-c277-c548-bdfc-1d049c1ce428" [ 1106.397029] env[62383]: _type = "Task" [ 1106.397029] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.407706] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527ae19c-c277-c548-bdfc-1d049c1ce428, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.468902] env[62383]: DEBUG nova.network.neutron [req-b3dba639-0403-4f8e-8d5d-c1d271f350bf req-5589fc26-e7b2-413f-b645-ff28ff2d5da7 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Updated VIF entry in instance network info cache for port ba594434-1ccb-410e-9307-5e4deb6c17a8. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1106.469471] env[62383]: DEBUG nova.network.neutron [req-b3dba639-0403-4f8e-8d5d-c1d271f350bf req-5589fc26-e7b2-413f-b645-ff28ff2d5da7 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Updating instance_info_cache with network_info: [{"id": "ba594434-1ccb-410e-9307-5e4deb6c17a8", "address": "fa:16:3e:cd:fb:59", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba594434-1c", "ovs_interfaceid": "ba594434-1ccb-410e-9307-5e4deb6c17a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.521838] env[62383]: DEBUG oslo_vmware.api [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452376, 'name': PowerOnVM_Task, 'duration_secs': 0.422213} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.522334] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.522657] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-be2136db-7388-45e6-a778-8e9ae99cfda9 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance '2f028680-8db4-474a-8f24-880c4702877b' progress to 100 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1106.634968] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452375, 'name': CreateSnapshot_Task, 'duration_secs': 0.838528} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.634968] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1106.635930] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7faf2e4-2a35-486d-8584-59f428b50c87 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.864264] env[62383]: DEBUG nova.network.neutron [-] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.894686] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 161d6537-fe78-4a42-b8a5-e3d7d78c0154] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1106.908163] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527ae19c-c277-c548-bdfc-1d049c1ce428, 'name': SearchDatastore_Task, 'duration_secs': 0.011821} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.909067] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ffc10e3-564b-45dc-84a8-e880049a5838 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.915257] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1106.915257] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52866888-c9ba-9893-23d3-1dd723ad84ea" [ 1106.915257] env[62383]: _type = "Task" [ 1106.915257] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.923837] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52866888-c9ba-9893-23d3-1dd723ad84ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.972377] env[62383]: DEBUG oslo_concurrency.lockutils [req-b3dba639-0403-4f8e-8d5d-c1d271f350bf req-5589fc26-e7b2-413f-b645-ff28ff2d5da7 service nova] Releasing lock "refresh_cache-7cece477-9444-4ffd-88a0-d6c821cb7275" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.159756] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1107.160320] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fbfaf950-f296-44f2-93b2-c93ec15cda17 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.171578] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1107.171578] env[62383]: value = "task-2452377" [ 1107.171578] env[62383]: _type = "Task" [ 1107.171578] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.184755] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452377, 'name': CloneVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.368116] env[62383]: INFO nova.compute.manager [-] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Took 1.54 seconds to deallocate network for instance. [ 1107.397587] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 1ab60ef9-4209-4097-8a2c-a55e3a6684b2] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1107.427820] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52866888-c9ba-9893-23d3-1dd723ad84ea, 'name': SearchDatastore_Task, 'duration_secs': 0.014563} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.428745] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.429020] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 7cece477-9444-4ffd-88a0-d6c821cb7275/7cece477-9444-4ffd-88a0-d6c821cb7275.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1107.429326] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98ac9cc4-4e4b-483d-8a5e-eda8e26ba3df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.439893] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1107.439893] env[62383]: value = "task-2452378" [ 1107.439893] env[62383]: _type = "Task" [ 1107.439893] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.450494] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452378, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.553139] env[62383]: DEBUG nova.compute.manager [req-da489f0c-00a1-4bad-81e5-80cff4e1ebfb req-b95103d2-60e2-40ff-8863-0ed9e940707b service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received event network-vif-plugged-a4c18916-d567-413d-974a-6d623f258430 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1107.553367] env[62383]: DEBUG oslo_concurrency.lockutils [req-da489f0c-00a1-4bad-81e5-80cff4e1ebfb req-b95103d2-60e2-40ff-8863-0ed9e940707b service nova] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.553574] env[62383]: DEBUG oslo_concurrency.lockutils [req-da489f0c-00a1-4bad-81e5-80cff4e1ebfb req-b95103d2-60e2-40ff-8863-0ed9e940707b service nova] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.553814] env[62383]: DEBUG oslo_concurrency.lockutils [req-da489f0c-00a1-4bad-81e5-80cff4e1ebfb req-b95103d2-60e2-40ff-8863-0ed9e940707b service nova] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.553891] env[62383]: DEBUG nova.compute.manager [req-da489f0c-00a1-4bad-81e5-80cff4e1ebfb req-b95103d2-60e2-40ff-8863-0ed9e940707b service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] No waiting events found dispatching network-vif-plugged-a4c18916-d567-413d-974a-6d623f258430 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1107.554064] env[62383]: WARNING nova.compute.manager [req-da489f0c-00a1-4bad-81e5-80cff4e1ebfb req-b95103d2-60e2-40ff-8863-0ed9e940707b service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received unexpected event network-vif-plugged-a4c18916-d567-413d-974a-6d623f258430 for instance with vm_state active and task_state None. [ 1107.650846] env[62383]: DEBUG nova.network.neutron [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Successfully updated port: a4c18916-d567-413d-974a-6d623f258430 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1107.685286] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452377, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.759381] env[62383]: DEBUG oslo_concurrency.lockutils [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "de24aca8-30fc-453e-b192-b6bb115876ef" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.759710] env[62383]: DEBUG oslo_concurrency.lockutils [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.874409] env[62383]: DEBUG oslo_concurrency.lockutils [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.874753] env[62383]: DEBUG oslo_concurrency.lockutils [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.875042] env[62383]: DEBUG nova.objects.instance [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lazy-loading 'resources' on Instance uuid 9f8e346e-815c-492d-84a9-00ebdca3bcc3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1107.901030] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: f193af26-eba8-471f-a00e-0afa9b190d0b] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1107.951406] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452378, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.154047] env[62383]: DEBUG oslo_concurrency.lockutils [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.154255] env[62383]: DEBUG oslo_concurrency.lockutils [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.154505] env[62383]: DEBUG nova.network.neutron [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1108.183387] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452377, 'name': CloneVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.262966] env[62383]: DEBUG nova.compute.utils [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1108.404189] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: df5e6f1b-ac01-4ac0-bc84-b49c54c3e771] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1108.451149] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452378, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56098} completed successfully. 
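[editor's note] The CopyVirtualDisk_Task just completed is the first step of the spawn sequence traced above for instance 7cece477 (copy the cached VMDK, extend the root disk, attach it, rename, power on), with each vCenter task awaited before the next starts. A compressed sketch of that ordering follows; the session.submit_* and wait_for_task names are hypothetical stand-ins, not Nova's vmops implementation.

    # Sketch only: the spawn steps run strictly one after another.
    def spawn_from_cache(session, cache_vmdk, instance_vmdk, root_size_kb, vm_ref):
        steps = [
            lambda: session.submit_copy_virtual_disk(cache_vmdk, instance_vmdk),
            lambda: session.submit_extend_virtual_disk(instance_vmdk, root_size_kb),
            lambda: session.submit_reconfigure_attach_disk(vm_ref, instance_vmdk),
            lambda: session.submit_rename(vm_ref),
            lambda: session.submit_power_on(vm_ref),
        ]
        for start_step in steps:
            task_ref = start_step()
            session.wait_for_task(task_ref)  # block until this step completes
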
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.453812] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 7cece477-9444-4ffd-88a0-d6c821cb7275/7cece477-9444-4ffd-88a0-d6c821cb7275.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1108.454059] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1108.454521] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e793b09-b12c-4581-a94f-af979f65cae4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.463288] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1108.463288] env[62383]: value = "task-2452379" [ 1108.463288] env[62383]: _type = "Task" [ 1108.463288] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.474262] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452379, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.515721] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573aa3c0-1335-4479-a4f6-467f5c0ea86f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.525277] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cf3c2e-a91d-4645-a44b-dce93b42c84f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.558245] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd92c478-6c7e-4ad9-a3ad-11f2ed72f0bc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.567819] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb0806b-2871-4645-a475-6922ef84751d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.582512] env[62383]: DEBUG nova.compute.provider_tree [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.684182] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452377, 'name': CloneVM_Task} progress is 95%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.695529] env[62383]: WARNING nova.network.neutron [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] ce80ab32-a193-40db-be36-b8085e20a4c5 already exists in list: networks containing: ['ce80ab32-a193-40db-be36-b8085e20a4c5']. ignoring it [ 1108.695729] env[62383]: WARNING nova.network.neutron [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] ce80ab32-a193-40db-be36-b8085e20a4c5 already exists in list: networks containing: ['ce80ab32-a193-40db-be36-b8085e20a4c5']. 
ignoring it [ 1108.765613] env[62383]: DEBUG oslo_concurrency.lockutils [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.906963] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: eb632e2d-b71e-446d-83a2-0bab1d823d27] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1108.974360] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072801} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.974631] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1108.975478] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877fb91a-a0a3-493f-8a9c-110f73801d22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.999219] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 7cece477-9444-4ffd-88a0-d6c821cb7275/7cece477-9444-4ffd-88a0-d6c821cb7275.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1109.001683] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b696d6d8-5a14-48d3-8d26-51e1d6f03e0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.023075] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1109.023075] env[62383]: value = "task-2452380" [ 1109.023075] env[62383]: _type = "Task" [ 1109.023075] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.035938] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452380, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.085607] env[62383]: DEBUG nova.scheduler.client.report [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1109.148266] env[62383]: DEBUG nova.network.neutron [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fb190575-295c-42fa-b8ba-fc6f19ccfa01", "address": "fa:16:3e:a3:d3:c3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb190575-29", "ovs_interfaceid": "fb190575-295c-42fa-b8ba-fc6f19ccfa01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a4c18916-d567-413d-974a-6d623f258430", "address": "fa:16:3e:48:52:a2", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c18916-d5", "ovs_interfaceid": "a4c18916-d567-413d-974a-6d623f258430", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.183528] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452377, 'name': CloneVM_Task, 'duration_secs': 1.694949} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.183792] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Created linked-clone VM from snapshot [ 1109.184572] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6054a8-ec39-4163-92e0-dee501f86402 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.192917] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Uploading image cc264ce0-56c7-485b-8b5f-25bd2cbf6a47 {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1109.220729] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1109.220729] env[62383]: value = "vm-496597" [ 1109.220729] env[62383]: _type = "VirtualMachine" [ 1109.220729] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1109.221043] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-88d8e81c-3c67-4ce2-8838-348c83f89f85 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.229666] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lease: (returnval){ [ 1109.229666] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520f4e90-174a-fa97-da54-19dd2b835e63" [ 1109.229666] env[62383]: _type = "HttpNfcLease" [ 1109.229666] env[62383]: } obtained for exporting VM: (result){ [ 1109.229666] env[62383]: value = "vm-496597" [ 1109.229666] env[62383]: _type = "VirtualMachine" [ 1109.229666] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1109.229979] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the lease: (returnval){ [ 1109.229979] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520f4e90-174a-fa97-da54-19dd2b835e63" [ 1109.229979] env[62383]: _type = "HttpNfcLease" [ 1109.229979] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1109.237145] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1109.237145] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520f4e90-174a-fa97-da54-19dd2b835e63" [ 1109.237145] env[62383]: _type = "HttpNfcLease" [ 1109.237145] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1109.376382] env[62383]: DEBUG nova.compute.manager [req-cee931d0-7a55-4fcf-8966-94513baba6cc req-4f94a71c-f730-45cf-ab4f-e9a41b3ee80e service nova] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Received event network-vif-deleted-92d4aebb-165f-462e-96ea-53a36bc5eae8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1109.411057] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: c117e858-696f-43dc-9182-70380214737f] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1109.533620] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452380, 'name': ReconfigVM_Task, 'duration_secs': 0.407745} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.534055] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 7cece477-9444-4ffd-88a0-d6c821cb7275/7cece477-9444-4ffd-88a0-d6c821cb7275.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1109.534740] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba7273b7-b95d-43bd-ba2d-c4e8be2d9aa8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.545934] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1109.545934] env[62383]: value = "task-2452382" [ 1109.545934] env[62383]: _type = "Task" [ 1109.545934] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.555937] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452382, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.590431] env[62383]: DEBUG oslo_concurrency.lockutils [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1109.617434] env[62383]: INFO nova.scheduler.client.report [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted allocations for instance 9f8e346e-815c-492d-84a9-00ebdca3bcc3 [ 1109.651039] env[62383]: DEBUG oslo_concurrency.lockutils [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.651883] env[62383]: DEBUG oslo_concurrency.lockutils [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.653036] env[62383]: DEBUG oslo_concurrency.lockutils [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.653036] 
env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea52be7-d77f-40f3-82c2-1ce91c0a38ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.675739] env[62383]: DEBUG nova.virt.hardware [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1109.677470] env[62383]: DEBUG nova.virt.hardware [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1109.677470] env[62383]: DEBUG nova.virt.hardware [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1109.677470] env[62383]: DEBUG nova.virt.hardware [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1109.677470] env[62383]: DEBUG nova.virt.hardware [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1109.677470] env[62383]: DEBUG nova.virt.hardware [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1109.677470] env[62383]: DEBUG nova.virt.hardware [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1109.677470] env[62383]: DEBUG nova.virt.hardware [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1109.677470] env[62383]: DEBUG nova.virt.hardware [None 
req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1109.677799] env[62383]: DEBUG nova.virt.hardware [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1109.677799] env[62383]: DEBUG nova.virt.hardware [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1109.684449] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Reconfiguring VM to attach interface {{(pid=62383) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1109.685717] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-039d8911-28f8-477b-8271-8b5cb510a090 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.707328] env[62383]: DEBUG oslo_vmware.api [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1109.707328] env[62383]: value = "task-2452383" [ 1109.707328] env[62383]: _type = "Task" [ 1109.707328] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.719753] env[62383]: DEBUG oslo_vmware.api [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452383, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.740249] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1109.740249] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520f4e90-174a-fa97-da54-19dd2b835e63" [ 1109.740249] env[62383]: _type = "HttpNfcLease" [ 1109.740249] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1109.740600] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1109.740600] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520f4e90-174a-fa97-da54-19dd2b835e63" [ 1109.740600] env[62383]: _type = "HttpNfcLease" [ 1109.740600] env[62383]: }. 
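[editor's note] The HttpNfcLease records above trace the image-upload path for the linked clone: create an export lease, wait for it to leave "initializing", then read the disk from the VMDK URL the lease advertises. A hedged sketch of that flow follows; the lease/HTTP helpers are hypothetical stand-ins, not the oslo.vmware rw_handles implementation.

    # Sketch only: export a VM disk through an NFC lease once it is ready.
    import time

    def export_vm_disk(vm, open_url, poll_interval=1.0):
        lease = vm.export_vm()                  # returns an HttpNfcLease-like object
        while lease.state() == "initializing":  # lease must reach "ready" first
            time.sleep(poll_interval)
        if lease.state() != "ready":
            raise RuntimeError("lease entered state %s" % lease.state())
        vmdk_url = lease.disk_urls()[0]         # e.g. an https://.../disk-0.vmdk URL
        return open_url(vmdk_url)               # caller streams this into the image service
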
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1109.741378] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952a8bdf-1213-4959-b2e4-33fa65edb1e7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.749898] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d02ea7-3f47-a89c-410b-f53d4ef9a15e/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1109.750211] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d02ea7-3f47-a89c-410b-f53d4ef9a15e/disk-0.vmdk for reading. {{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1109.824439] env[62383]: DEBUG nova.compute.manager [req-0b97e470-c3a7-4648-a452-98acb501846a req-e3e26213-108d-4610-bc96-69bfd8519159 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received event network-changed-a4c18916-d567-413d-974a-6d623f258430 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1109.824789] env[62383]: DEBUG nova.compute.manager [req-0b97e470-c3a7-4648-a452-98acb501846a req-e3e26213-108d-4610-bc96-69bfd8519159 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Refreshing instance network info cache due to event network-changed-a4c18916-d567-413d-974a-6d623f258430. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1109.825162] env[62383]: DEBUG oslo_concurrency.lockutils [req-0b97e470-c3a7-4648-a452-98acb501846a req-e3e26213-108d-4610-bc96-69bfd8519159 service nova] Acquiring lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1109.825422] env[62383]: DEBUG oslo_concurrency.lockutils [req-0b97e470-c3a7-4648-a452-98acb501846a req-e3e26213-108d-4610-bc96-69bfd8519159 service nova] Acquired lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.825725] env[62383]: DEBUG nova.network.neutron [req-0b97e470-c3a7-4648-a452-98acb501846a req-e3e26213-108d-4610-bc96-69bfd8519159 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Refreshing network info cache for port a4c18916-d567-413d-974a-6d623f258430 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1109.856867] env[62383]: DEBUG oslo_concurrency.lockutils [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "de24aca8-30fc-453e-b192-b6bb115876ef" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.857208] env[62383]: DEBUG oslo_concurrency.lockutils [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.857565] env[62383]: INFO nova.compute.manager [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Attaching volume 7143f309-78ca-4e0e-91eb-3cc74c67966c to /dev/sdb [ 1109.886737] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9390d64f-6458-4391-8ec3-9185d4393443 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.896128] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae4e77b-3c57-4776-895b-ce96d773941f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.904120] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e0de87-1b0d-431e-9717-f062beb0e6ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.916403] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: c56464dd-63af-4686-b666-d0ac2df01ec1] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1109.926901] env[62383]: DEBUG nova.virt.block_device [None req-70449f8a-632d-457d-a25d-7589b7a50b8b 
tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Updating existing volume attachment record: 5ce1b617-35a3-4832-8529-63139f5b910e {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1110.064643] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "2f028680-8db4-474a-8f24-880c4702877b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.064643] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.064643] env[62383]: DEBUG nova.compute.manager [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Going to confirm migration 7 {{(pid=62383) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1110.068272] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452382, 'name': Rename_Task, 'duration_secs': 0.272805} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.068540] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1110.069203] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-037af5ea-162d-4f1d-89a9-a06c703e2882 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.076985] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1110.076985] env[62383]: value = "task-2452384" [ 1110.076985] env[62383]: _type = "Task" [ 1110.076985] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.085747] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452384, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.126210] env[62383]: DEBUG oslo_concurrency.lockutils [None req-439f2104-d9a4-4378-aca8-db807ae7a7a4 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "9f8e346e-815c-492d-84a9-00ebdca3bcc3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.962s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1110.224605] env[62383]: DEBUG oslo_vmware.api [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452383, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.420122] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 188d6b20-3dca-4c1c-8271-1871d2c992d5] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1110.587668] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452384, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.617723] env[62383]: DEBUG nova.network.neutron [req-0b97e470-c3a7-4648-a452-98acb501846a req-e3e26213-108d-4610-bc96-69bfd8519159 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updated VIF entry in instance network info cache for port a4c18916-d567-413d-974a-6d623f258430. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1110.618365] env[62383]: DEBUG nova.network.neutron [req-0b97e470-c3a7-4648-a452-98acb501846a req-e3e26213-108d-4610-bc96-69bfd8519159 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fb190575-295c-42fa-b8ba-fc6f19ccfa01", "address": "fa:16:3e:a3:d3:c3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb190575-29", "ovs_interfaceid": "fb190575-295c-42fa-b8ba-fc6f19ccfa01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "a4c18916-d567-413d-974a-6d623f258430", "address": "fa:16:3e:48:52:a2", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4c18916-d5", "ovs_interfaceid": "a4c18916-d567-413d-974a-6d623f258430", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.677826] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1110.678092] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.678325] env[62383]: DEBUG nova.network.neutron [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1110.678603] env[62383]: DEBUG nova.objects.instance [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'info_cache' on Instance uuid 2f028680-8db4-474a-8f24-880c4702877b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.722838] env[62383]: DEBUG oslo_vmware.api [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452383, 'name': ReconfigVM_Task, 'duration_secs': 0.784563} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.722838] env[62383]: DEBUG oslo_concurrency.lockutils [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1110.722838] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Reconfigured VM to attach interface {{(pid=62383) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1110.927327] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0f6b7094-27a0-4e97-98ac-bff857124b6c] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1111.087572] env[62383]: DEBUG oslo_vmware.api [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452384, 'name': PowerOnVM_Task, 'duration_secs': 0.63362} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.087870] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1111.088091] env[62383]: INFO nova.compute.manager [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Took 7.96 seconds to spawn the instance on the hypervisor. 
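The PowerOnVM_Task entries above ("progress is 0%" ... "progress is 100%", then "completed successfully") come from oslo.vmware's task poller. A minimal sketch of the calling pattern, assuming an already constructed oslo_vmware.api.VMwareAPISession and a placeholder VirtualMachine managed-object reference (neither taken from this log), would look roughly like:

    # Minimal sketch (not Nova's actual driver code): issue a power-on call
    # and block on the resulting vCenter task via oslo.vmware -- the pattern
    # behind the "Invoking VirtualMachine.PowerOnVM_Task" and wait_for_task
    # entries above.  ``session`` is assumed to be an existing
    # oslo_vmware.api.VMwareAPISession; ``vm_ref`` is a placeholder
    # VirtualMachine managed-object reference.

    def power_on_and_wait(session, vm_ref):
        # invoke_api() sends the SOAP request and returns the task
        # managed-object reference ("Invoking ... with opID=oslo.vmware-...").
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task until vCenter reports success
        # (the "_poll_task ... progress is N%" lines at api.py:434 and the
        # "completed successfully" line at api.py:444) and raises an
        # oslo.vmware exception if the task ends in an error state.
        return session.wait_for_task(task_ref)

The same invoke/wait pair appears elsewhere in this trace for ReconfigVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task; only the method name and its arguments change.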
[ 1111.088272] env[62383]: DEBUG nova.compute.manager [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1111.089123] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9811e95-7012-48b9-b9a1-aebf22781141 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.121325] env[62383]: DEBUG oslo_concurrency.lockutils [req-0b97e470-c3a7-4648-a452-98acb501846a req-e3e26213-108d-4610-bc96-69bfd8519159 service nova] Releasing lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1111.227159] env[62383]: DEBUG oslo_concurrency.lockutils [None req-37687b4f-769f-4001-bd2c-78cd47681c5e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-a4c18916-d567-413d-974a-6d623f258430" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.438s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.434167] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 23d24da6-c7d8-4d6a-8442-a1066505aab1] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1111.444387] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.444387] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.605045] env[62383]: INFO nova.compute.manager [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Took 15.76 seconds to build instance. 
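The 'Acquiring lock "<uuid>" by "nova.compute.manager.ComputeManager..." :: waited/held' entries are produced by oslo.concurrency's synchronized decorator wrapped around a nested helper function. A minimal, hypothetical sketch of that shape (function and variable names are placeholders, not Nova's actual code):

    # Hypothetical sketch of the per-instance locking pattern behind the
    # lockutils "Acquiring lock" / "acquired" / "released" entries above:
    # the real work runs inside a nested function synchronized on the
    # instance UUID.
    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            # Guarded section: only one request may operate on this instance
            # at a time.  The decorator logs how long the caller waited for
            # the lock and, on exit, how long it was held (the ':: waited
            # N.NNNs' and ':: held N.NNNs' figures in the trace).
            pass  # placeholder for the actual teardown work

        do_terminate_instance()

This decorator-on-a-nested-function shape is why the lock holders in the trace show up as qualified names such as ComputeManager.terminate_instance...do_terminate_instance rather than plain function names.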
[ 1111.916870] env[62383]: DEBUG nova.network.neutron [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [{"id": "5ba29557-a079-4404-9449-eeff24a0a3e4", "address": "fa:16:3e:fd:2c:1c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ba29557-a0", "ovs_interfaceid": "5ba29557-a079-4404-9449-eeff24a0a3e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.937500] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: b9669bb8-680f-492a-a7c6-82e6edb0a8ed] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1111.946033] env[62383]: DEBUG nova.compute.manager [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1112.106935] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e8a2fcb-bae5-4c5c-8bcd-2e41142ab33d tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "7cece477-9444-4ffd-88a0-d6c821cb7275" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.270s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.420295] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-2f028680-8db4-474a-8f24-880c4702877b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1112.420556] env[62383]: DEBUG nova.objects.instance [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'migration_context' on Instance uuid 2f028680-8db4-474a-8f24-880c4702877b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.445365] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 4d58d2e6-171d-4346-b281-bcbd22286623] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1112.468507] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.469240] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.470951] env[62383]: INFO nova.compute.claims [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1112.663474] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-fb190575-295c-42fa-b8ba-fc6f19ccfa01" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.664570] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-fb190575-295c-42fa-b8ba-fc6f19ccfa01" acquired by 
"nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.739400] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "7cece477-9444-4ffd-88a0-d6c821cb7275" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.739680] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "7cece477-9444-4ffd-88a0-d6c821cb7275" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.739838] env[62383]: DEBUG nova.compute.manager [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1112.740777] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5c6f49-09a0-40f8-b764-fbc1fcb54eb1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.747897] env[62383]: DEBUG nova.compute.manager [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1112.748544] env[62383]: DEBUG nova.objects.instance [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lazy-loading 'flavor' on Instance uuid 7cece477-9444-4ffd-88a0-d6c821cb7275 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.923759] env[62383]: DEBUG nova.objects.base [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Object Instance<2f028680-8db4-474a-8f24-880c4702877b> lazy-loaded attributes: info_cache,migration_context {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1112.924782] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713f9292-c873-4629-87ff-531ab8c7200d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.944668] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4f1646b-b9c1-4fb4-9c5c-2b99bbb741d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.948892] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 
506afe7c-f19b-4417-b097-485c0244a019] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1112.952061] env[62383]: DEBUG oslo_vmware.api [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1112.952061] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5200fd7e-1237-53d5-0fd4-7242ac71e4d4" [ 1112.952061] env[62383]: _type = "Task" [ 1112.952061] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.960459] env[62383]: DEBUG oslo_vmware.api [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5200fd7e-1237-53d5-0fd4-7242ac71e4d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.166659] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1113.166880] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.167808] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df242da-dece-424a-b089-01bf59aa5e4b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.188130] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710da573-3f04-48cb-9700-6c7ee147af54 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.218488] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Reconfiguring VM to detach interface {{(pid=62383) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1113.219198] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3126bd3-205a-42bd-8635-162cc431938d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.238625] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1113.238625] env[62383]: value = "task-2452387" [ 1113.238625] env[62383]: _type = "Task" [ 1113.238625] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.247141] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.453329] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: eedc7859-3882-4837-9419-f9edce5f12fa] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1113.465911] env[62383]: DEBUG oslo_vmware.api [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5200fd7e-1237-53d5-0fd4-7242ac71e4d4, 'name': SearchDatastore_Task, 'duration_secs': 0.011388} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.466346] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1113.622176] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2928279-66a3-4513-bb82-ebddaf2d1a5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.632106] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e24297-632b-4099-82b9-7a77e53de82e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.665390] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c83383a-4281-4e3b-920e-7234371eec33 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.673152] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24db68db-0836-4084-9889-fdbb45956bd4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.687951] env[62383]: DEBUG nova.compute.provider_tree [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.750407] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.756168] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1113.756474] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73a8796d-5c1f-440a-99bb-6d4382ed0a31 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.763080] env[62383]: DEBUG oslo_vmware.api [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1113.763080] env[62383]: value = "task-2452388" [ 1113.763080] env[62383]: _type = "Task" [ 1113.763080] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.771789] env[62383]: DEBUG oslo_vmware.api [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452388, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.960091] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 40719661-5955-48ec-b289-b37896dd04df] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1114.191058] env[62383]: DEBUG nova.scheduler.client.report [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.250572] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.273631] env[62383]: DEBUG oslo_vmware.api [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452388, 'name': PowerOffVM_Task, 'duration_secs': 0.274389} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.273816] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1114.274048] env[62383]: DEBUG nova.compute.manager [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1114.274852] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883e9507-66c7-4fe8-936b-b6b751a33cb4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.463698] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 81921762-ac51-42d2-83dc-d5b6e904fbb7] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1114.480216] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Volume attach. Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1114.480484] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496598', 'volume_id': '7143f309-78ca-4e0e-91eb-3cc74c67966c', 'name': 'volume-7143f309-78ca-4e0e-91eb-3cc74c67966c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'de24aca8-30fc-453e-b192-b6bb115876ef', 'attached_at': '', 'detached_at': '', 'volume_id': '7143f309-78ca-4e0e-91eb-3cc74c67966c', 'serial': '7143f309-78ca-4e0e-91eb-3cc74c67966c'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1114.481421] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497875a0-3978-41eb-b018-f0d895f92268 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.498575] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80dbd2e6-7c17-4368-995e-1bf5df2c31e7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.530082] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 
volume-7143f309-78ca-4e0e-91eb-3cc74c67966c/volume-7143f309-78ca-4e0e-91eb-3cc74c67966c.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1114.530913] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab3aa16e-559b-4ed7-96f8-36a8573e827d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.550821] env[62383]: DEBUG oslo_vmware.api [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1114.550821] env[62383]: value = "task-2452389" [ 1114.550821] env[62383]: _type = "Task" [ 1114.550821] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.559606] env[62383]: DEBUG oslo_vmware.api [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452389, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.696455] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.227s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.697642] env[62383]: DEBUG nova.compute.manager [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1114.700834] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.234s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.751575] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.787102] env[62383]: DEBUG oslo_concurrency.lockutils [None req-4e3bab75-2c90-4dff-8c6f-b3a26fefa227 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "7cece477-9444-4ffd-88a0-d6c821cb7275" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1114.967793] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 4b3e6064-4462-45e7-b5dd-f2fc22422c3e] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1115.061023] env[62383]: DEBUG oslo_vmware.api [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452389, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.192189] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "7cece477-9444-4ffd-88a0-d6c821cb7275" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.192504] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "7cece477-9444-4ffd-88a0-d6c821cb7275" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.192812] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "7cece477-9444-4ffd-88a0-d6c821cb7275-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.193107] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "7cece477-9444-4ffd-88a0-d6c821cb7275-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.193332] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "7cece477-9444-4ffd-88a0-d6c821cb7275-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.195653] env[62383]: INFO nova.compute.manager 
[None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Terminating instance [ 1115.206944] env[62383]: DEBUG nova.compute.utils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1115.209417] env[62383]: DEBUG nova.compute.manager [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1115.210599] env[62383]: DEBUG nova.network.neutron [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1115.251257] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.261751] env[62383]: DEBUG nova.policy [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4de9dec9c1d2474eb611f4a2623d602d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aead8ea1d1de4d0d8d8c07dec519d8b4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1115.352907] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e8a325-b78f-4a93-881f-9f511bd8a0b9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.361783] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e711f33-8874-4f34-b8c6-5eccf3155992 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.393524] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447404d7-fa19-47af-9c7e-84b1dfb0f507 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.401075] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5b5aa3-e3cc-4645-97ba-d81772a10c56 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.414803] env[62383]: DEBUG nova.compute.provider_tree [None 
req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.471588] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 7740a70f-3c95-49aa-b3ec-0e0effd3efcc] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1115.551125] env[62383]: DEBUG nova.network.neutron [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Successfully created port: ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1115.563285] env[62383]: DEBUG oslo_vmware.api [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452389, 'name': ReconfigVM_Task, 'duration_secs': 0.641312} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.563611] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Reconfigured VM instance instance-0000006b to attach disk [datastore2] volume-7143f309-78ca-4e0e-91eb-3cc74c67966c/volume-7143f309-78ca-4e0e-91eb-3cc74c67966c.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1115.568567] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22b5d486-7702-418e-a23f-41a1989b3184 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.584619] env[62383]: DEBUG oslo_vmware.api [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1115.584619] env[62383]: value = "task-2452390" [ 1115.584619] env[62383]: _type = "Task" [ 1115.584619] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.594128] env[62383]: DEBUG oslo_vmware.api [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452390, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.700095] env[62383]: DEBUG nova.compute.manager [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1115.700357] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1115.701319] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb17ddd-e147-4f90-8298-dff943ac99b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.709583] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1115.709871] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01143caf-6df6-469f-ba42-afd33dba7243 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.712265] env[62383]: DEBUG nova.compute.manager [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1115.750860] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.785823] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1115.786148] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1115.786410] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleting the datastore file [datastore2] 7cece477-9444-4ffd-88a0-d6c821cb7275 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1115.786706] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f71bd6b-27c0-48f3-9f3e-574980feab8a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.793943] env[62383]: DEBUG oslo_vmware.api [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1115.793943] env[62383]: value = "task-2452392" [ 1115.793943] env[62383]: _type = "Task" [ 1115.793943] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.802292] env[62383]: DEBUG oslo_vmware.api [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452392, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.918772] env[62383]: DEBUG nova.scheduler.client.report [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1115.975481] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a04a6a53-cca8-4e15-b840-cb1394e5b188] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1116.095392] env[62383]: DEBUG oslo_vmware.api [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452390, 'name': ReconfigVM_Task, 'duration_secs': 0.156164} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.095731] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496598', 'volume_id': '7143f309-78ca-4e0e-91eb-3cc74c67966c', 'name': 'volume-7143f309-78ca-4e0e-91eb-3cc74c67966c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'de24aca8-30fc-453e-b192-b6bb115876ef', 'attached_at': '', 'detached_at': '', 'volume_id': '7143f309-78ca-4e0e-91eb-3cc74c67966c', 'serial': '7143f309-78ca-4e0e-91eb-3cc74c67966c'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1116.254123] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.303601] env[62383]: DEBUG oslo_vmware.api [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452392, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196394} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.303783] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1116.303961] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1116.304250] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1116.304447] env[62383]: INFO nova.compute.manager [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1116.304695] env[62383]: DEBUG oslo.service.loopingcall [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1116.304885] env[62383]: DEBUG nova.compute.manager [-] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1116.304998] env[62383]: DEBUG nova.network.neutron [-] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1116.478816] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2eba2920-7912-475b-a198-890743aa5255] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1116.569659] env[62383]: DEBUG nova.compute.manager [req-44a6bbb8-df53-4f4a-9233-a1e3eec54df3 req-65f71bb2-901e-4bbe-9a42-397ea653ee00 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Received event network-vif-deleted-ba594434-1ccb-410e-9307-5e4deb6c17a8 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1116.569881] env[62383]: INFO nova.compute.manager [req-44a6bbb8-df53-4f4a-9233-a1e3eec54df3 req-65f71bb2-901e-4bbe-9a42-397ea653ee00 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Neutron deleted interface ba594434-1ccb-410e-9307-5e4deb6c17a8; detaching it from the instance and deleting it from the info cache [ 1116.570076] env[62383]: DEBUG nova.network.neutron [req-44a6bbb8-df53-4f4a-9233-a1e3eec54df3 req-65f71bb2-901e-4bbe-9a42-397ea653ee00 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Updating instance_info_cache with network_info: [] 
{{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.721055] env[62383]: DEBUG nova.compute.manager [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1116.751350] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1116.751611] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1116.751911] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1116.752064] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1116.752239] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1116.752421] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1116.752703] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1116.752911] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1116.753164] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1116.753495] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1116.753593] env[62383]: DEBUG nova.virt.hardware [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1116.754486] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f16ff3-3fb8-4b06-9445-e0327af5396e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.760362] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.765464] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228bf28f-0ade-448d-8133-79fb000c9d8c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.930650] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.230s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.984802] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: bc37e114-cf55-408b-9841-05eaf411b4f5] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1117.052162] env[62383]: DEBUG nova.network.neutron [-] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.062231] env[62383]: DEBUG nova.network.neutron [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Successfully updated port: ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1117.073170] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-add8ace7-6e86-432f-bfbe-b6f59c1c6f52 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.084228] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58cb880-c2bb-4e8f-8bff-5ec1ba868a34 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.114544] env[62383]: DEBUG nova.compute.manager [req-44a6bbb8-df53-4f4a-9233-a1e3eec54df3 req-65f71bb2-901e-4bbe-9a42-397ea653ee00 service nova] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Detach interface failed, port_id=ba594434-1ccb-410e-9307-5e4deb6c17a8, reason: Instance 7cece477-9444-4ffd-88a0-d6c821cb7275 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1117.135388] env[62383]: DEBUG nova.objects.instance [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lazy-loading 'flavor' on Instance uuid de24aca8-30fc-453e-b192-b6bb115876ef {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1117.254343] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.399736] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "de24aca8-30fc-453e-b192-b6bb115876ef" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.485737] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 6fda89ec-aee1-4c1e-b005-51a9742abb19] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1117.490489] env[62383]: INFO nova.scheduler.client.report [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleted allocation for migration 227d47a8-0fa8-4aa6-b7e6-671721533413 [ 1117.554925] env[62383]: INFO nova.compute.manager [-] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Took 1.25 seconds to deallocate network for instance. [ 1117.564261] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "refresh_cache-c94e9a83-04de-4144-ab6e-d96dc7c39e6d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1117.564653] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "refresh_cache-c94e9a83-04de-4144-ab6e-d96dc7c39e6d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.564653] env[62383]: DEBUG nova.network.neutron [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1117.640017] env[62383]: DEBUG oslo_concurrency.lockutils [None req-70449f8a-632d-457d-a25d-7589b7a50b8b tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.783s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.641079] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.242s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.754589] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': 
ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.988966] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 12e6baef-0614-4a12-b958-30b0f56fe486] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1117.997666] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9b980010-ae96-4b1e-834a-c23d90d97df6 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.935s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.063036] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.063036] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.063269] env[62383]: DEBUG nova.objects.instance [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lazy-loading 'resources' on Instance uuid 7cece477-9444-4ffd-88a0-d6c821cb7275 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1118.096381] env[62383]: DEBUG nova.network.neutron [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1118.143818] env[62383]: INFO nova.compute.manager [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Detaching volume 7143f309-78ca-4e0e-91eb-3cc74c67966c [ 1118.180697] env[62383]: INFO nova.virt.block_device [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Attempting to driver detach volume 7143f309-78ca-4e0e-91eb-3cc74c67966c from mountpoint /dev/sdb [ 1118.180965] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Volume detach. 
Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1118.181186] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496598', 'volume_id': '7143f309-78ca-4e0e-91eb-3cc74c67966c', 'name': 'volume-7143f309-78ca-4e0e-91eb-3cc74c67966c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'de24aca8-30fc-453e-b192-b6bb115876ef', 'attached_at': '', 'detached_at': '', 'volume_id': '7143f309-78ca-4e0e-91eb-3cc74c67966c', 'serial': '7143f309-78ca-4e0e-91eb-3cc74c67966c'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1118.182197] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ed09a2-af15-4d53-aefc-cfac1a466270 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.211567] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "2f028680-8db4-474a-8f24-880c4702877b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.211844] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.212073] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "2f028680-8db4-474a-8f24-880c4702877b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.212263] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.212433] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.214468] env[62383]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a6cd13-3fd8-4eac-a55b-406276bb444c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.218046] env[62383]: INFO nova.compute.manager [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Terminating instance [ 1118.226456] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4122567-03b4-4c6f-b303-9b9a05435fb4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.252922] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2927305d-ed3c-4db2-8c37-2cc09952a6a7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.256454] env[62383]: DEBUG nova.network.neutron [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Updating instance_info_cache with network_info: [{"id": "ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2", "address": "fa:16:3e:4a:c3:2d", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff37bd19-eb", "ovs_interfaceid": "ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.264018] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.274828] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] The volume has not been displaced from its original location: [datastore2] volume-7143f309-78ca-4e0e-91eb-3cc74c67966c/volume-7143f309-78ca-4e0e-91eb-3cc74c67966c.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1118.280062] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1118.280684] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7ca9137-dd03-4d28-a3e9-541c503ea3f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.300031] env[62383]: DEBUG oslo_vmware.api [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1118.300031] env[62383]: value = "task-2452393" [ 1118.300031] env[62383]: _type = "Task" [ 1118.300031] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.308136] env[62383]: DEBUG oslo_vmware.api [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452393, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.402479] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d02ea7-3f47-a89c-410b-f53d4ef9a15e/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1118.403403] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8159da89-c5b9-4e66-bdbf-020926aa1c1a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.409576] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d02ea7-3f47-a89c-410b-f53d4ef9a15e/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1118.409740] env[62383]: ERROR oslo_vmware.rw_handles [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d02ea7-3f47-a89c-410b-f53d4ef9a15e/disk-0.vmdk due to incomplete transfer. 
[ 1118.409968] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3902f4f4-327c-489a-9c45-7f08f0e855ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.416544] env[62383]: DEBUG oslo_vmware.rw_handles [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d02ea7-3f47-a89c-410b-f53d4ef9a15e/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1118.416787] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Uploaded image cc264ce0-56c7-485b-8b5f-25bd2cbf6a47 to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1118.419345] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1118.419616] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c7660a27-8b84-4b32-b4d7-7e7810a30f26 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.425501] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1118.425501] env[62383]: value = "task-2452394" [ 1118.425501] env[62383]: _type = "Task" [ 1118.425501] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.432551] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452394, 'name': Destroy_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.492886] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 282f2c94-7a63-4eef-aa80-7d67d0a0972a] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1118.607033] env[62383]: DEBUG nova.compute.manager [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Received event network-vif-plugged-ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1118.607033] env[62383]: DEBUG oslo_concurrency.lockutils [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] Acquiring lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1118.607241] env[62383]: DEBUG oslo_concurrency.lockutils [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] Lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.607475] env[62383]: DEBUG oslo_concurrency.lockutils [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] Lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1118.607737] env[62383]: DEBUG nova.compute.manager [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] No waiting events found dispatching network-vif-plugged-ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1118.607853] env[62383]: WARNING nova.compute.manager [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Received unexpected event network-vif-plugged-ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2 for instance with vm_state building and task_state spawning. [ 1118.608014] env[62383]: DEBUG nova.compute.manager [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Received event network-changed-ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1118.608214] env[62383]: DEBUG nova.compute.manager [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Refreshing instance network info cache due to event network-changed-ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1118.608313] env[62383]: DEBUG oslo_concurrency.lockutils [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] Acquiring lock "refresh_cache-c94e9a83-04de-4144-ab6e-d96dc7c39e6d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1118.678058] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac249f63-c422-4a90-811f-a82a029a9fea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.686223] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d489d2af-6efe-4af4-8886-87fe5658bac7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.722632] env[62383]: DEBUG nova.compute.manager [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1118.722901] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1118.723964] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2268193f-d323-4fca-9360-db3f7fd56386 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.727242] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3707fbe0-ba06-45a5-9f53-c6590c9bd97e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.736907] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7029675b-bc46-4ba7-9f71-a6c86462dc59 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.740763] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1118.741041] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1da48b59-9eda-45bd-ab36-51e229244db8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.752495] env[62383]: DEBUG nova.compute.provider_tree [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1118.758545] env[62383]: DEBUG oslo_vmware.api [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1118.758545] env[62383]: value = "task-2452395" [ 1118.758545] env[62383]: _type = "Task" [ 1118.758545] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.759299] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "refresh_cache-c94e9a83-04de-4144-ab6e-d96dc7c39e6d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1118.759674] env[62383]: DEBUG nova.compute.manager [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Instance network_info: |[{"id": "ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2", "address": "fa:16:3e:4a:c3:2d", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff37bd19-eb", "ovs_interfaceid": "ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1118.763510] env[62383]: DEBUG oslo_concurrency.lockutils [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] Acquired lock "refresh_cache-c94e9a83-04de-4144-ab6e-d96dc7c39e6d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.763702] env[62383]: DEBUG nova.network.neutron [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Refreshing network info cache for port ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1118.765043] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 
tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:c3:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1118.772906] env[62383]: DEBUG oslo.service.loopingcall [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1118.777183] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1118.777486] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.778036] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3638c161-0cb4-4479-9880-a7e44ba2a01c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.794985] env[62383]: DEBUG oslo_vmware.api [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452395, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.799299] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1118.799299] env[62383]: value = "task-2452396" [ 1118.799299] env[62383]: _type = "Task" [ 1118.799299] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.809529] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452396, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.812625] env[62383]: DEBUG oslo_vmware.api [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452393, 'name': ReconfigVM_Task, 'duration_secs': 0.293048} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.812888] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1118.817413] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bedc8746-46bf-4b7f-bf24-9f1e3cf12467 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.833059] env[62383]: DEBUG oslo_vmware.api [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1118.833059] env[62383]: value = "task-2452397" [ 1118.833059] env[62383]: _type = "Task" [ 1118.833059] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.841152] env[62383]: DEBUG oslo_vmware.api [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452397, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.938807] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452394, 'name': Destroy_Task, 'duration_secs': 0.365201} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.939080] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Destroyed the VM [ 1118.939351] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1118.939617] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4c7673f9-e281-4223-858b-4e07305d9d4c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.946856] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1118.946856] env[62383]: value = "task-2452398" [ 1118.946856] env[62383]: _type = "Task" [ 1118.946856] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.955082] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452398, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.995886] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a68610a6-f684-4cc9-8dd4-8b90d2d379da] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1119.265159] env[62383]: DEBUG oslo_vmware.api [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452387, 'name': ReconfigVM_Task, 'duration_secs': 5.771557} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.268781] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.269090] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Reconfigured VM to detach interface {{(pid=62383) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1119.278343] env[62383]: DEBUG oslo_vmware.api [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452395, 'name': PowerOffVM_Task, 'duration_secs': 0.236819} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.278841] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1119.279061] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1119.279309] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-49e82ff1-71a0-47e4-9e40-04460c917610 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.281762] env[62383]: ERROR nova.scheduler.client.report [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [req-c671682a-f29a-4e57-b1fd-961f636b5439] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c671682a-f29a-4e57-b1fd-961f636b5439"}]} [ 1119.297358] env[62383]: DEBUG nova.scheduler.client.report [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1119.310779] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452396, 'name': CreateVM_Task, 'duration_secs': 0.389148} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.311756] env[62383]: DEBUG nova.scheduler.client.report [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1119.311935] env[62383]: DEBUG nova.compute.provider_tree [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1119.313782] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1119.316143] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1119.316393] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.316558] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1119.316909] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f18fd5b4-d3a7-4028-8f20-7e2f38f60914 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.321486] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the 
task: (returnval){ [ 1119.321486] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f2c9a5-5de4-78cb-5ee3-ccc608826694" [ 1119.321486] env[62383]: _type = "Task" [ 1119.321486] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.329044] env[62383]: DEBUG nova.scheduler.client.report [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1119.334039] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f2c9a5-5de4-78cb-5ee3-ccc608826694, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.342126] env[62383]: DEBUG oslo_vmware.api [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452397, 'name': ReconfigVM_Task, 'duration_secs': 0.18835} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.343304] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496598', 'volume_id': '7143f309-78ca-4e0e-91eb-3cc74c67966c', 'name': 'volume-7143f309-78ca-4e0e-91eb-3cc74c67966c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'de24aca8-30fc-453e-b192-b6bb115876ef', 'attached_at': '', 'detached_at': '', 'volume_id': '7143f309-78ca-4e0e-91eb-3cc74c67966c', 'serial': '7143f309-78ca-4e0e-91eb-3cc74c67966c'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1119.345415] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1119.345611] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Deleting contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1119.345821] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleting the datastore file [datastore1] 2f028680-8db4-474a-8f24-880c4702877b {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1119.346270] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63333fb1-8abf-4a68-b97d-e51caf18385f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.348574] env[62383]: DEBUG nova.scheduler.client.report [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1119.357950] env[62383]: DEBUG oslo_vmware.api [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1119.357950] env[62383]: value = "task-2452400" [ 1119.357950] env[62383]: _type = "Task" [ 1119.357950] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.367637] env[62383]: DEBUG oslo_vmware.api [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452400, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.456526] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452398, 'name': RemoveSnapshot_Task, 'duration_secs': 0.344673} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.459473] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1119.459759] env[62383]: DEBUG nova.compute.manager [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1119.460501] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a1aa27-7d4b-428f-a316-27f7bd6274d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.463366] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b8e693-b79a-4a6e-930b-fc3767f3322a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.472358] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8dbc2ce-a6d5-47d1-b82e-fab7314caa44 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.510732] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a27fcace-4fb3-48fb-946d-b8057f6ee601] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1119.519080] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f62fd821-c6ce-45ba-b211-52aa2d73f0c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.527372] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ad98e5-463e-44ce-a419-d900decaab54 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.544653] env[62383]: DEBUG nova.compute.provider_tree [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1119.618851] env[62383]: DEBUG nova.network.neutron [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Updated VIF entry in instance network info cache for port ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1119.621259] env[62383]: DEBUG nova.network.neutron [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Updating instance_info_cache with network_info: [{"id": "ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2", "address": "fa:16:3e:4a:c3:2d", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff37bd19-eb", "ovs_interfaceid": "ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.832493] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f2c9a5-5de4-78cb-5ee3-ccc608826694, 'name': SearchDatastore_Task, 'duration_secs': 0.013283} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.832763] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1119.832995] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1119.833257] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1119.833405] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.833580] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1119.833869] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11d71cbe-5b80-497d-bb3b-746357110a1f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.848830] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1119.849034] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1119.849791] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f92a2ae1-f8e8-46fe-ab13-9b15cfba3d60 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.855267] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1119.855267] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a6be0b-ba07-449b-4c63-78e2a620a400" [ 1119.855267] env[62383]: _type = "Task" [ 1119.855267] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.865644] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a6be0b-ba07-449b-4c63-78e2a620a400, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.868205] env[62383]: DEBUG oslo_vmware.api [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169183} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.868430] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1119.868611] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Deleted contents of the VM from datastore datastore1 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1119.868792] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1119.868966] env[62383]: INFO nova.compute.manager [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1119.869218] env[62383]: DEBUG oslo.service.loopingcall [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1119.869406] env[62383]: DEBUG nova.compute.manager [-] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1119.869498] env[62383]: DEBUG nova.network.neutron [-] [instance: 2f028680-8db4-474a-8f24-880c4702877b] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1119.905031] env[62383]: DEBUG nova.objects.instance [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lazy-loading 'flavor' on Instance uuid de24aca8-30fc-453e-b192-b6bb115876ef {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1119.986308] env[62383]: INFO nova.compute.manager [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Shelve offloading [ 1120.013782] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 5ef22e87-f73c-47ba-b925-2bd2effe74eb] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1120.084023] env[62383]: DEBUG nova.scheduler.client.report [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 157 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1120.084023] env[62383]: DEBUG nova.compute.provider_tree [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 157 to 158 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1120.084023] env[62383]: DEBUG nova.compute.provider_tree [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1120.118163] env[62383]: DEBUG nova.compute.manager [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] 
[instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received event network-vif-deleted-a4c18916-d567-413d-974a-6d623f258430 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1120.118163] env[62383]: INFO nova.compute.manager [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Neutron deleted interface a4c18916-d567-413d-974a-6d623f258430; detaching it from the instance and deleting it from the info cache [ 1120.118163] env[62383]: DEBUG nova.network.neutron [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fb190575-295c-42fa-b8ba-fc6f19ccfa01", "address": "fa:16:3e:a3:d3:c3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb190575-29", "ovs_interfaceid": "fb190575-295c-42fa-b8ba-fc6f19ccfa01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.122613] env[62383]: DEBUG oslo_concurrency.lockutils [req-75061728-3aed-41ef-a812-2667449231bb req-892cda65-98be-47d0-9c6d-18d5b1078e9a service nova] Releasing lock "refresh_cache-c94e9a83-04de-4144-ab6e-d96dc7c39e6d" 
{{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1120.365612] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a6be0b-ba07-449b-4c63-78e2a620a400, 'name': SearchDatastore_Task, 'duration_secs': 0.035529} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.366390] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-194948dc-a886-414a-b3d9-b3acbe4e0733 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.371203] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1120.371203] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52389c60-8ba5-1442-198b-27c488ffeb66" [ 1120.371203] env[62383]: _type = "Task" [ 1120.371203] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.378472] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52389c60-8ba5-1442-198b-27c488ffeb66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.490636] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1120.490935] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6d0ccaa-1ed3-4619-a94b-8d761ccc7861 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.498305] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1120.498305] env[62383]: value = "task-2452401" [ 1120.498305] env[62383]: _type = "Task" [ 1120.498305] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.506336] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452401, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.516954] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 93234e99-268f-491e-96bd-a77f4c9f164b] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1120.591574] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.529s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.618888] env[62383]: INFO nova.scheduler.client.report [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted allocations for instance 7cece477-9444-4ffd-88a0-d6c821cb7275 [ 1120.627250] env[62383]: DEBUG oslo_concurrency.lockutils [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.627250] env[62383]: DEBUG oslo_concurrency.lockutils [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Acquired lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.627250] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d04ebeb-325a-4579-99fd-cf5bf32c892b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.645537] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e920263-b9ec-4e36-8043-cd7a948e7142 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.649683] env[62383]: DEBUG nova.compute.manager [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received event network-vif-deleted-fb190575-295c-42fa-b8ba-fc6f19ccfa01 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1120.649939] env[62383]: INFO nova.compute.manager [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Neutron deleted interface fb190575-295c-42fa-b8ba-fc6f19ccfa01; detaching it from the instance and deleting it from the info cache [ 1120.650249] env[62383]: DEBUG nova.network.neutron [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.651591] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1120.651732] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.651916] env[62383]: DEBUG nova.network.neutron [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1120.671902] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.677576] env[62383]: DEBUG nova.virt.vmwareapi.vmops [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Reconfiguring VM to detach interface {{(pid=62383) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1120.679234] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07759e75-0a3f-485a-b872-7b053eca128a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.699155] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Waiting for the task: (returnval){ [ 1120.699155] env[62383]: value = "task-2452402" [ 1120.699155] env[62383]: _type = "Task" [ 1120.699155] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.706851] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.803910] env[62383]: DEBUG nova.network.neutron [-] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.881673] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52389c60-8ba5-1442-198b-27c488ffeb66, 'name': SearchDatastore_Task, 'duration_secs': 0.020039} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.881948] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1120.882217] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] c94e9a83-04de-4144-ab6e-d96dc7c39e6d/c94e9a83-04de-4144-ab6e-d96dc7c39e6d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1120.882476] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c87ff356-9d32-427f-b392-b673241fd16d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.889491] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1120.889491] env[62383]: value = "task-2452403" [ 1120.889491] env[62383]: _type = "Task" [ 1120.889491] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.897422] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452403, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.912065] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae02f6f-c498-48be-9ff9-0b13a2336632 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.271s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.010043] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1121.010366] env[62383]: DEBUG nova.compute.manager [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1121.011258] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f804ae-e262-4128-8cd2-dd2225c03f9d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.017742] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.017997] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.018169] env[62383]: DEBUG nova.network.neutron [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1121.020836] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: e41f5c22-44e0-4de8-a4d0-865fe2c6febd] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1121.127773] env[62383]: DEBUG oslo_concurrency.lockutils [None req-62973afc-da61-4cf2-8315-0a96d58f1e8e tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "7cece477-9444-4ffd-88a0-d6c821cb7275" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.935s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.154290] env[62383]: DEBUG oslo_concurrency.lockutils [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 
req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.211556] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.306970] env[62383]: INFO nova.compute.manager [-] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Took 1.44 seconds to deallocate network for instance. [ 1121.394761] env[62383]: INFO nova.network.neutron [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Port fb190575-295c-42fa-b8ba-fc6f19ccfa01 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1121.395357] env[62383]: DEBUG nova.network.neutron [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [{"id": "2a9eb423-4347-4116-825d-0afad0e10ad1", "address": "fa:16:3e:59:b3:80", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9eb423-43", "ovs_interfaceid": "2a9eb423-4347-4116-825d-0afad0e10ad1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.402053] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433824} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.402293] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] c94e9a83-04de-4144-ab6e-d96dc7c39e6d/c94e9a83-04de-4144-ab6e-d96dc7c39e6d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1121.402506] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1121.402749] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-402def60-5328-45f3-9c83-d88264332653 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.412256] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1121.412256] env[62383]: value = "task-2452404" [ 1121.412256] env[62383]: _type = "Task" [ 1121.412256] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.420512] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452404, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.523939] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 4cd9c7be-c5f4-460b-a9e2-e8f778076947] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1121.669022] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "de24aca8-30fc-453e-b192-b6bb115876ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.669193] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.669407] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "de24aca8-30fc-453e-b192-b6bb115876ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.669588] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.669778] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.672581] env[62383]: INFO nova.compute.manager [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Terminating instance [ 1121.710430] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.753897] env[62383]: DEBUG nova.network.neutron [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating instance_info_cache with network_info: [{"id": "e822f89d-516c-4eab-bd54-f1369994f514", "address": "fa:16:3e:70:0a:80", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape822f89d-51", "ovs_interfaceid": "e822f89d-516c-4eab-bd54-f1369994f514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.813469] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.813695] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.813939] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.832139] env[62383]: INFO nova.scheduler.client.report [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleted allocations for instance 2f028680-8db4-474a-8f24-880c4702877b [ 1121.898496] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.921919] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058279} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.922264] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1121.923168] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5af8f7f-9c04-4384-821e-fcc7000864fb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.946248] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] c94e9a83-04de-4144-ab6e-d96dc7c39e6d/c94e9a83-04de-4144-ab6e-d96dc7c39e6d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.946756] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46a6cd45-8cff-45a0-a149-28e29ff85763 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.965593] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1121.965593] env[62383]: value = "task-2452405" [ 1121.965593] env[62383]: _type = "Task" [ 1121.965593] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.973169] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.028445] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: d0311c29-e1ed-446f-a52b-1687b9561740] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1122.177672] env[62383]: DEBUG nova.compute.manager [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1122.177888] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1122.178811] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b803dc4f-4b80-4c3c-bf0c-26e60783fe03 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.187016] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1122.187820] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1092b016-b5cc-4b0e-ad04-fa7189dcf1be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.193613] env[62383]: DEBUG oslo_vmware.api [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1122.193613] env[62383]: value = "task-2452406" [ 1122.193613] env[62383]: _type = "Task" [ 1122.193613] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.202189] env[62383]: DEBUG oslo_vmware.api [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452406, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.209246] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "687912b8-40d2-4243-b31c-06107aa6cfb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.209495] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "687912b8-40d2-4243-b31c-06107aa6cfb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.213757] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.256545] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1122.342749] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dca3afeb-eac5-4908-8e43-22f882f80716 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "2f028680-8db4-474a-8f24-880c4702877b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.131s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.404165] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8e89d9c9-5913-4fdf-b1b9-48ebe69e2e0c tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-8d9d6f3b-aef7-478a-a43e-3b621f1b3845-fb190575-295c-42fa-b8ba-fc6f19ccfa01" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.740s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.475409] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452405, 'name': ReconfigVM_Task, 'duration_secs': 0.332722} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.475705] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Reconfigured VM instance instance-0000006e to attach disk [datastore2] c94e9a83-04de-4144-ab6e-d96dc7c39e6d/c94e9a83-04de-4144-ab6e-d96dc7c39e6d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.476449] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d865c50-f283-44fa-8446-41f61279b0c3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.482762] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1122.482762] env[62383]: value = "task-2452407" [ 1122.482762] env[62383]: _type = "Task" [ 1122.482762] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.490353] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452407, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.533058] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 1e367665-1d4b-4686-ac79-c946423c1762] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1122.601371] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1122.602712] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc96ebea-290f-4ed8-bb9e-c876236e5f7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.611664] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1122.612087] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18d729c8-3101-4f8b-8060-45635cae6ce2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.677030] env[62383]: DEBUG nova.compute.manager [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received event network-vif-unplugged-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1122.677030] env[62383]: DEBUG oslo_concurrency.lockutils [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] Acquiring lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.677454] env[62383]: DEBUG oslo_concurrency.lockutils [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] Lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.677454] env[62383]: DEBUG oslo_concurrency.lockutils [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] Lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.677565] env[62383]: DEBUG nova.compute.manager [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] No waiting events found dispatching network-vif-unplugged-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1122.677902] env[62383]: WARNING nova.compute.manager [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received unexpected event network-vif-unplugged-e822f89d-516c-4eab-bd54-f1369994f514 for instance with vm_state shelved and task_state shelving_offloading. [ 1122.677902] env[62383]: DEBUG nova.compute.manager [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received event network-changed-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1122.678468] env[62383]: DEBUG nova.compute.manager [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Refreshing instance network info cache due to event network-changed-e822f89d-516c-4eab-bd54-f1369994f514. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1122.678468] env[62383]: DEBUG oslo_concurrency.lockutils [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] Acquiring lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1122.678917] env[62383]: DEBUG oslo_concurrency.lockutils [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] Acquired lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.678917] env[62383]: DEBUG nova.network.neutron [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Refreshing network info cache for port e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1122.685691] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1122.685915] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1122.686138] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleting the datastore file [datastore2] 0392d059-57ea-49fb-84d2-b71cbca840db {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.686898] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67ebd18b-ecf3-4e50-91b8-013871edeb22 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.695609] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1122.695609] env[62383]: value = "task-2452409" [ 1122.695609] env[62383]: _type = "Task" [ 1122.695609] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.709491] env[62383]: DEBUG oslo_vmware.api [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452406, 'name': PowerOffVM_Task, 'duration_secs': 0.204779} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.712635] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1122.712809] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1122.713061] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452409, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.713266] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b27b2c4-6401-42e4-b018-04188c8948e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.717347] env[62383]: DEBUG nova.compute.manager [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1122.719972] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.796166] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1122.796432] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1122.796592] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleting the datastore file [datastore2] de24aca8-30fc-453e-b192-b6bb115876ef {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.796852] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d372792b-0f6b-4da6-bb7f-e40d4ab09402 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.802474] env[62383]: DEBUG oslo_vmware.api [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1122.802474] env[62383]: value = "task-2452411" [ 1122.802474] env[62383]: _type = "Task" [ 1122.802474] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.810138] env[62383]: DEBUG oslo_vmware.api [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452411, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.993346] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452407, 'name': Rename_Task, 'duration_secs': 0.162065} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.993638] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1122.993885] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6e0ea76-6672-4c5b-b8bb-e1bfe9bcd57a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.001611] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1123.001611] env[62383]: value = "task-2452412" [ 1123.001611] env[62383]: _type = "Task" [ 1123.001611] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.014430] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452412, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.036771] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 9604eadf-a027-46dd-989b-0d4b752f883a] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1123.214105] env[62383]: DEBUG oslo_vmware.api [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169648} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.218613] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1123.218839] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1123.219022] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1123.220813] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.236439] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.236696] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.238164] env[62383]: INFO nova.compute.claims [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1123.241219] env[62383]: INFO nova.scheduler.client.report [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleted allocations for instance 0392d059-57ea-49fb-84d2-b71cbca840db [ 1123.312579] env[62383]: DEBUG oslo_vmware.api [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452411, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171275} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.315124] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1123.315373] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1123.315595] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1123.315808] env[62383]: INFO nova.compute.manager [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Took 1.14 seconds to destroy the instance on the hypervisor. 
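The PowerOffVM_Task, Rename_Task and DeleteDatastoreFile_Task entries above all follow the same pattern: a vSphere task is submitted, then wait_for_task/_poll_task in oslo_vmware/api.py (the paths shown in the entries) polls it and logs "progress is N%" until it completes. Below is a minimal, self-contained sketch of that polling loop; it is not the oslo.vmware implementation, and FakeTask, its fields and the poll interval are invented purely for illustration.

# Illustrative sketch of the task-polling pattern seen in the log above.
# Not oslo.vmware code: FakeTask and the interval are stand-ins.
import time


class FakeTask:
    """Stands in for a vSphere task reference; reports progress, then success."""

    def __init__(self, steps=3):
        self._steps = steps
        self._polls = 0

    def poll(self):
        self._polls += 1
        if self._polls >= self._steps:
            return {"state": "success", "progress": 100}
        return {"state": "running",
                "progress": int(100 * self._polls / self._steps)}


def wait_for_task(task, interval=0.5):
    """Poll a task until it succeeds or errors, like the 'progress is N%' lines."""
    while True:
        info = task.poll()
        print("Task progress is %d%%" % info["progress"])
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask(), interval=0.1)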
[ 1123.316108] env[62383]: DEBUG oslo.service.loopingcall [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1123.316318] env[62383]: DEBUG nova.compute.manager [-] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1123.316413] env[62383]: DEBUG nova.network.neutron [-] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1123.471706] env[62383]: DEBUG nova.network.neutron [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updated VIF entry in instance network info cache for port e822f89d-516c-4eab-bd54-f1369994f514. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1123.472165] env[62383]: DEBUG nova.network.neutron [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating instance_info_cache with network_info: [{"id": "e822f89d-516c-4eab-bd54-f1369994f514", "address": "fa:16:3e:70:0a:80", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": null, "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tape822f89d-51", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.511825] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452412, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.540083] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8bd05dac-7aa2-44c5-8752-6045c01d213d] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1123.593910] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "f7584d2c-5add-4764-9aed-22f7d1674854" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.594153] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "f7584d2c-5add-4764-9aed-22f7d1674854" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.714203] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.745455] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.975273] env[62383]: DEBUG oslo_concurrency.lockutils [req-9b45b085-6a4b-43a0-920b-46be7ce79cff req-3c2fe7ff-e529-4e90-af62-06f5c72549cb service nova] Releasing lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.012693] env[62383]: DEBUG oslo_vmware.api [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452412, 'name': PowerOnVM_Task, 'duration_secs': 0.51784} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.012693] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.012693] env[62383]: INFO nova.compute.manager [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Took 7.29 seconds to spawn the instance on the hypervisor. 
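The 'Acquiring lock ... by ...', 'acquired ... :: waited N.NNNs' and '"released" ... :: held N.NNNs' entries throughout this log come from oslo.concurrency's lock helper (lockutils.py:402/407/421 in the paths above). The standalone sketch below reproduces only that timing bookkeeping and message format; the lock() helper, its caller argument and the demo lock names are illustrative and are not the oslo.concurrency API.

# Illustrative reimplementation of the lock wait/held logging seen above.
import contextlib
import threading
import time

_locks = {}


@contextlib.contextmanager
def lock(name, caller):
    inner = _locks.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, caller))
    t0 = time.monotonic()
    inner.acquire()
    t1 = time.monotonic()
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, t1 - t0))
    try:
        yield
    finally:
        inner.release()
        print('Lock "%s" "released" by "%s" :: held %.3fs'
              % (name, caller, time.monotonic() - t1))


if __name__ == "__main__":
    with lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.05)  # critical section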
[ 1124.012693] env[62383]: DEBUG nova.compute.manager [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.013850] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63bb7e2-34e0-4914-8e07-b62371589625 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.043032] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 67f05a2b-f323-4e4a-ac13-7f4745593be0] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1124.096565] env[62383]: DEBUG nova.compute.manager [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1124.214210] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.321394] env[62383]: DEBUG nova.network.neutron [-] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.357685] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2663f5d2-92e5-43be-b221-8cdb30ba28d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.364959] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df35a840-b6bc-49b3-9bd5-d112c849c270 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.395895] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e5d0c9-e5ca-4a44-ac02-e31b0dab9994 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.404364] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b01a323-acce-4996-a747-07d32861bfb9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.417196] env[62383]: DEBUG nova.compute.provider_tree [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.530100] env[62383]: INFO nova.compute.manager [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Took 12.08 
seconds to build instance. [ 1124.546674] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2cffbbd9-f0b8-46e1-b6db-b74fb2499d1a] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1124.615483] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.715848] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.824027] env[62383]: INFO nova.compute.manager [-] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Took 1.51 seconds to deallocate network for instance. [ 1124.922143] env[62383]: DEBUG nova.scheduler.client.report [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.014923] env[62383]: DEBUG nova.compute.manager [req-9fbedde8-4d70-4d48-9af8-f6c159f017fb req-4f6c43c8-8176-4b60-b075-f3f7bbf56f20 service nova] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Received event network-vif-deleted-3e70fa62-b81f-4cf7-950b-772addf79f9c {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1125.032705] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1adb21d4-eb19-4d4f-9cdb-43792ca19046 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.589s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.050080] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 3810ae49-3b9d-4c5f-b579-8abddc8d6c1a] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1125.108269] env[62383]: DEBUG nova.compute.manager [req-48575cd5-b1da-4a77-ba7e-d74f11afef76 req-34af1a83-9ffa-476f-bc07-8027e2732501 service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Received event network-changed-ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1125.108401] env[62383]: DEBUG nova.compute.manager [req-48575cd5-b1da-4a77-ba7e-d74f11afef76 
req-34af1a83-9ffa-476f-bc07-8027e2732501 service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Refreshing instance network info cache due to event network-changed-ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1125.108510] env[62383]: DEBUG oslo_concurrency.lockutils [req-48575cd5-b1da-4a77-ba7e-d74f11afef76 req-34af1a83-9ffa-476f-bc07-8027e2732501 service nova] Acquiring lock "refresh_cache-c94e9a83-04de-4144-ab6e-d96dc7c39e6d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1125.108648] env[62383]: DEBUG oslo_concurrency.lockutils [req-48575cd5-b1da-4a77-ba7e-d74f11afef76 req-34af1a83-9ffa-476f-bc07-8027e2732501 service nova] Acquired lock "refresh_cache-c94e9a83-04de-4144-ab6e-d96dc7c39e6d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.108808] env[62383]: DEBUG nova.network.neutron [req-48575cd5-b1da-4a77-ba7e-d74f11afef76 req-34af1a83-9ffa-476f-bc07-8027e2732501 service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Refreshing network info cache for port ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1125.215611] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.331098] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1125.427105] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.190s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1125.427660] env[62383]: DEBUG nova.compute.manager [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1125.430240] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.685s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1125.430475] env[62383]: DEBUG nova.objects.instance [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'resources' on Instance uuid 0392d059-57ea-49fb-84d2-b71cbca840db {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1125.553332] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: ca342da5-b6e7-4d00-be10-f7f3e6ff8b7c] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1125.720028] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.820736] env[62383]: DEBUG nova.network.neutron [req-48575cd5-b1da-4a77-ba7e-d74f11afef76 req-34af1a83-9ffa-476f-bc07-8027e2732501 service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Updated VIF entry in instance network info cache for port ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1125.820918] env[62383]: DEBUG nova.network.neutron [req-48575cd5-b1da-4a77-ba7e-d74f11afef76 req-34af1a83-9ffa-476f-bc07-8027e2732501 service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Updating instance_info_cache with network_info: [{"id": "ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2", "address": "fa:16:3e:4a:c3:2d", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff37bd19-eb", "ovs_interfaceid": "ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.934169] env[62383]: DEBUG nova.compute.utils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 
tempest-DeleteServersTestJSON-465293751-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1125.935848] env[62383]: DEBUG nova.compute.manager [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1125.936132] env[62383]: DEBUG nova.network.neutron [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1125.938669] env[62383]: DEBUG nova.objects.instance [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'numa_topology' on Instance uuid 0392d059-57ea-49fb-84d2-b71cbca840db {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1125.974865] env[62383]: DEBUG nova.policy [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc391aae95a8405bab7801175514ac8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c15955328966463fa09401a270d95fe0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1126.020227] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "0392d059-57ea-49fb-84d2-b71cbca840db" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.056723] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 57eaad0a-ca55-4bff-bbd0-6155ecf1cb93] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1126.220395] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.268558] env[62383]: DEBUG nova.network.neutron [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Successfully created port: 1dd1fea9-5c32-479b-879c-f6ca6cdcbc26 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1126.323913] env[62383]: DEBUG oslo_concurrency.lockutils [req-48575cd5-b1da-4a77-ba7e-d74f11afef76 req-34af1a83-9ffa-476f-bc07-8027e2732501 service nova] Releasing lock "refresh_cache-c94e9a83-04de-4144-ab6e-d96dc7c39e6d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1126.440774] env[62383]: DEBUG nova.compute.manager [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1126.443960] env[62383]: DEBUG nova.objects.base [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Object Instance<0392d059-57ea-49fb-84d2-b71cbca840db> lazy-loaded attributes: resources,numa_topology {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1126.552201] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0571bf9e-e72c-4aaa-babb-24efbbcd8fea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.559676] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bef613-c7c1-4e5b-93d2-73da8952509f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.564548] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: c2fee51e-3cc9-421c-bfe5-b324a5b14197] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1126.595051] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339762a8-ceb8-49d4-b467-226b0a83173e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.602489] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6af6e8f-5e1e-4e4a-934b-26608452773d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.615853] env[62383]: DEBUG nova.compute.provider_tree [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.719478] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.068326] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 152567ba-f24c-4674-b06e-98c76a3da324] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1127.119016] env[62383]: DEBUG nova.scheduler.client.report [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1127.220957] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.452880] env[62383]: DEBUG nova.compute.manager [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1127.482375] env[62383]: DEBUG nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1127.482634] env[62383]: DEBUG nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1127.482791] env[62383]: DEBUG nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1127.482973] env[62383]: DEBUG 
nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1127.483171] env[62383]: DEBUG nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1127.483332] env[62383]: DEBUG nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1127.483542] env[62383]: DEBUG nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1127.483731] env[62383]: DEBUG nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1127.483877] env[62383]: DEBUG nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1127.484050] env[62383]: DEBUG nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1127.484308] env[62383]: DEBUG nova.virt.hardware [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1127.485235] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409d5f0f-ae90-48a4-9618-4f3585673c29 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.493199] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5c310a-e359-4219-8840-e48550854b02 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.571460] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2337e9a2-736c-4d58-ac2e-04c8ad813be4] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1127.624095] env[62383]: DEBUG 
oslo_concurrency.lockutils [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.194s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.626704] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.011s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.628245] env[62383]: INFO nova.compute.claims [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1127.637553] env[62383]: DEBUG nova.compute.manager [req-fe50ca4f-3177-437e-ab2a-7dd972befcf2 req-fec99998-3dd2-49ee-b3e1-cde0f3cd7bb5 service nova] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Received event network-vif-plugged-1dd1fea9-5c32-479b-879c-f6ca6cdcbc26 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1127.637909] env[62383]: DEBUG oslo_concurrency.lockutils [req-fe50ca4f-3177-437e-ab2a-7dd972befcf2 req-fec99998-3dd2-49ee-b3e1-cde0f3cd7bb5 service nova] Acquiring lock "687912b8-40d2-4243-b31c-06107aa6cfb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.637974] env[62383]: DEBUG oslo_concurrency.lockutils [req-fe50ca4f-3177-437e-ab2a-7dd972befcf2 req-fec99998-3dd2-49ee-b3e1-cde0f3cd7bb5 service nova] Lock "687912b8-40d2-4243-b31c-06107aa6cfb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.638179] env[62383]: DEBUG oslo_concurrency.lockutils [req-fe50ca4f-3177-437e-ab2a-7dd972befcf2 req-fec99998-3dd2-49ee-b3e1-cde0f3cd7bb5 service nova] Lock "687912b8-40d2-4243-b31c-06107aa6cfb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.638343] env[62383]: DEBUG nova.compute.manager [req-fe50ca4f-3177-437e-ab2a-7dd972befcf2 req-fec99998-3dd2-49ee-b3e1-cde0f3cd7bb5 service nova] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] No waiting events found dispatching network-vif-plugged-1dd1fea9-5c32-479b-879c-f6ca6cdcbc26 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1127.638433] env[62383]: WARNING nova.compute.manager [req-fe50ca4f-3177-437e-ab2a-7dd972befcf2 req-fec99998-3dd2-49ee-b3e1-cde0f3cd7bb5 service nova] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Received unexpected event network-vif-plugged-1dd1fea9-5c32-479b-879c-f6ca6cdcbc26 for instance with vm_state building and task_state spawning. 
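The 'No waiting events found dispatching ...' and 'Received unexpected event ...' warnings above reflect Nova's expected-event matching: before an operation, the compute manager registers the Neutron events it intends to wait for, and each incoming external event is either handed to a registered waiter or reported as unexpected. The sketch below is a simplified, self-contained approximation of that registry; the class and function names echo the log's InstanceEvents/external_instance_event, but the code is not Nova's implementation.

# Simplified stand-in for Nova's expected-event registry (see
# nova/compute/manager.py paths in the entries above).
import threading


class InstanceEvents:
    def __init__(self):
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, event_name):
        """Register interest before an operation that expects a notification."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)


def external_instance_event(events, instance_uuid, event_name):
    """Entry point for an incoming notification from the network service."""
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        print("WARNING: Received unexpected event %s for instance %s"
              % (event_name, instance_uuid))
    else:
        waiter.set()  # unblocks whoever is waiting on prepare_for_event()


if __name__ == "__main__":
    events = InstanceEvents()
    # No waiter registered, so this one is reported as unexpected:
    external_instance_event(events, "687912b8", "network-vif-plugged-1dd1fea9")
    # With a waiter registered, the event is dispatched instead:
    waiter = events.prepare_for_event("687912b8", "network-vif-plugged-1dd1fea9")
    external_instance_event(events, "687912b8", "network-vif-plugged-1dd1fea9")
    print("dispatched:", waiter.is_set())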
[ 1127.724533] env[62383]: DEBUG oslo_vmware.api [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Task: {'id': task-2452402, 'name': ReconfigVM_Task, 'duration_secs': 6.78054} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.725272] env[62383]: DEBUG nova.network.neutron [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Successfully updated port: 1dd1fea9-5c32-479b-879c-f6ca6cdcbc26 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1127.726467] env[62383]: DEBUG oslo_concurrency.lockutils [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] Releasing lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.726708] env[62383]: DEBUG nova.virt.vmwareapi.vmops [req-09c750d5-2f02-43eb-9c03-6b829072c4e7 req-7b549329-fb20-4462-b069-c411001c5e1f service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Reconfigured VM to detach interface {{(pid=62383) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1127.727395] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 7.056s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.727609] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.727812] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.727978] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.731177] env[62383]: INFO nova.compute.manager [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Terminating instance [ 1128.075276] env[62383]: DEBUG 
nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 80821717-f961-49c7-8b79-c152edfdfb94] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1128.137482] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f4ccc204-b8de-471f-81c7-a3c91d1a1a75 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "0392d059-57ea-49fb-84d2-b71cbca840db" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.598s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1128.138306] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "0392d059-57ea-49fb-84d2-b71cbca840db" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.118s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.138508] env[62383]: INFO nova.compute.manager [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Unshelving [ 1128.228416] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "refresh_cache-687912b8-40d2-4243-b31c-06107aa6cfb9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1128.228577] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "refresh_cache-687912b8-40d2-4243-b31c-06107aa6cfb9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.228732] env[62383]: DEBUG nova.network.neutron [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1128.235811] env[62383]: DEBUG nova.compute.manager [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1128.235811] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1128.237174] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe300c9-09cf-4bce-ab14-a9d2cd83aceb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.245223] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1128.245403] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bde8b65-8333-414c-8b7b-d6a58c2fa919 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.252130] env[62383]: DEBUG oslo_vmware.api [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1128.252130] env[62383]: value = "task-2452414" [ 1128.252130] env[62383]: _type = "Task" [ 1128.252130] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.260738] env[62383]: DEBUG oslo_vmware.api [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452414, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.578792] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8a165d96-f503-4bc5-bff4-e6a85201e137] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1128.758983] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80835a1f-de9f-4a51-b22c-31ad1dedecc0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.765975] env[62383]: DEBUG oslo_vmware.api [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452414, 'name': PowerOffVM_Task, 'duration_secs': 0.180849} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.766761] env[62383]: DEBUG nova.network.neutron [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1128.768941] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1128.769130] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1128.769383] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41f9762a-96a5-4a97-98e5-9ad68c2bf98a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.773780] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4c8249-e6cb-4678-b3bb-2b8d0ff094d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.806781] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e52ba0-24c8-417e-a77a-9f6ebac67afa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.814201] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d532971a-77e0-444e-8c52-d0920d119732 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.828473] env[62383]: DEBUG nova.compute.provider_tree [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.831753] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1128.831984] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1128.832195] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleting the datastore file [datastore2] 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1128.832499] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c34e493-8d03-4636-8b8d-c0f4616100b2 
{{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.839580] env[62383]: DEBUG oslo_vmware.api [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1128.839580] env[62383]: value = "task-2452417" [ 1128.839580] env[62383]: _type = "Task" [ 1128.839580] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.848153] env[62383]: DEBUG oslo_vmware.api [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452417, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.933586] env[62383]: DEBUG nova.network.neutron [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Updating instance_info_cache with network_info: [{"id": "1dd1fea9-5c32-479b-879c-f6ca6cdcbc26", "address": "fa:16:3e:af:04:bb", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd1fea9-5c", "ovs_interfaceid": "1dd1fea9-5c32-479b-879c-f6ca6cdcbc26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.083656] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: da16da02-25ab-46f9-9070-9fdde0b3a75e] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1129.165093] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.331942] env[62383]: DEBUG nova.scheduler.client.report [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1129.350033] env[62383]: DEBUG oslo_vmware.api [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452417, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.437224] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "refresh_cache-687912b8-40d2-4243-b31c-06107aa6cfb9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1129.437302] env[62383]: DEBUG nova.compute.manager [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Instance network_info: |[{"id": "1dd1fea9-5c32-479b-879c-f6ca6cdcbc26", "address": "fa:16:3e:af:04:bb", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd1fea9-5c", "ovs_interfaceid": "1dd1fea9-5c32-479b-879c-f6ca6cdcbc26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1129.437667] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:04:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1dd1fea9-5c32-479b-879c-f6ca6cdcbc26', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1129.445398] env[62383]: DEBUG oslo.service.loopingcall [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 
tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1129.445621] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1129.445843] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b88fc1c-48ab-4358-90e6-0a1263fe1aa1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.465505] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1129.465505] env[62383]: value = "task-2452418" [ 1129.465505] env[62383]: _type = "Task" [ 1129.465505] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.474785] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452418, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.587045] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2c93bdf1-aaf4-4e40-898a-634dc00d05e6] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1129.665594] env[62383]: DEBUG nova.compute.manager [req-b559d5ad-3673-4612-92b0-b9fb31602240 req-14f9e844-ff04-4e7f-af79-4044d2558d50 service nova] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Received event network-changed-1dd1fea9-5c32-479b-879c-f6ca6cdcbc26 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1129.665701] env[62383]: DEBUG nova.compute.manager [req-b559d5ad-3673-4612-92b0-b9fb31602240 req-14f9e844-ff04-4e7f-af79-4044d2558d50 service nova] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Refreshing instance network info cache due to event network-changed-1dd1fea9-5c32-479b-879c-f6ca6cdcbc26. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1129.665937] env[62383]: DEBUG oslo_concurrency.lockutils [req-b559d5ad-3673-4612-92b0-b9fb31602240 req-14f9e844-ff04-4e7f-af79-4044d2558d50 service nova] Acquiring lock "refresh_cache-687912b8-40d2-4243-b31c-06107aa6cfb9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1129.666054] env[62383]: DEBUG oslo_concurrency.lockutils [req-b559d5ad-3673-4612-92b0-b9fb31602240 req-14f9e844-ff04-4e7f-af79-4044d2558d50 service nova] Acquired lock "refresh_cache-687912b8-40d2-4243-b31c-06107aa6cfb9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.666637] env[62383]: DEBUG nova.network.neutron [req-b559d5ad-3673-4612-92b0-b9fb31602240 req-14f9e844-ff04-4e7f-af79-4044d2558d50 service nova] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Refreshing network info cache for port 1dd1fea9-5c32-479b-879c-f6ca6cdcbc26 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1129.837599] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.837599] env[62383]: DEBUG nova.compute.manager [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1129.839935] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.509s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.840203] env[62383]: DEBUG nova.objects.instance [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lazy-loading 'resources' on Instance uuid de24aca8-30fc-453e-b192-b6bb115876ef {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1129.850216] env[62383]: DEBUG oslo_vmware.api [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452417, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.56645} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.850455] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1129.850633] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1129.850802] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1129.850972] env[62383]: INFO nova.compute.manager [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1129.851218] env[62383]: DEBUG oslo.service.loopingcall [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1129.851407] env[62383]: DEBUG nova.compute.manager [-] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1129.851500] env[62383]: DEBUG nova.network.neutron [-] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1129.975273] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452418, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.090704] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 1a740010-ddd0-4df6-8ae6-02f1ed50137f] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1130.343010] env[62383]: DEBUG nova.compute.utils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1130.346710] env[62383]: DEBUG nova.compute.manager [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1130.346889] env[62383]: DEBUG nova.network.neutron [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1130.393459] env[62383]: DEBUG nova.policy [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e94f486c637c4b9f8c3cfa649688a603', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e320302a6b1e466e887c787006413dec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1130.478631] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452418, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.486906] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18fa3fe-1c55-4f9d-8b21-bc594bb74420 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.493718] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d176ada5-abb7-42cf-81aa-71751f9cf2d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.534366] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdbc1c8-c919-45da-8703-75a4ea7b20b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.542163] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33a6e69-c5b6-4331-a5d8-a420548c7485 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.556672] env[62383]: DEBUG nova.compute.provider_tree [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.594513] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a8d56b8e-fa11-4844-ab65-a2e5d24b1e07] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1130.598130] env[62383]: DEBUG nova.network.neutron [req-b559d5ad-3673-4612-92b0-b9fb31602240 req-14f9e844-ff04-4e7f-af79-4044d2558d50 service nova] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Updated VIF entry in instance network info cache for port 1dd1fea9-5c32-479b-879c-f6ca6cdcbc26. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1130.598130] env[62383]: DEBUG nova.network.neutron [req-b559d5ad-3673-4612-92b0-b9fb31602240 req-14f9e844-ff04-4e7f-af79-4044d2558d50 service nova] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Updating instance_info_cache with network_info: [{"id": "1dd1fea9-5c32-479b-879c-f6ca6cdcbc26", "address": "fa:16:3e:af:04:bb", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1dd1fea9-5c", "ovs_interfaceid": "1dd1fea9-5c32-479b-879c-f6ca6cdcbc26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.802163] env[62383]: DEBUG nova.network.neutron [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Successfully created port: fbc86f1d-2da8-4092-baac-7867624b1100 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1130.851614] env[62383]: DEBUG nova.compute.manager [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1130.977295] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452418, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.059946] env[62383]: DEBUG nova.scheduler.client.report [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1131.100288] env[62383]: DEBUG oslo_concurrency.lockutils [req-b559d5ad-3673-4612-92b0-b9fb31602240 req-14f9e844-ff04-4e7f-af79-4044d2558d50 service nova] Releasing lock "refresh_cache-687912b8-40d2-4243-b31c-06107aa6cfb9" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.100687] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.100830] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Cleaning up deleted instances with incomplete migration {{(pid=62383) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1131.128491] env[62383]: DEBUG nova.network.neutron [-] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.477599] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452418, 'name': CreateVM_Task, 'duration_secs': 1.99195} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.477786] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1131.478488] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.478657] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.478987] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1131.479257] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df7744f0-d4be-4262-95e1-96d010d36c8b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.483520] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1131.483520] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524533e3-18c0-1020-a51e-285a8ee5f25d" [ 1131.483520] env[62383]: _type = "Task" [ 1131.483520] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.491168] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524533e3-18c0-1020-a51e-285a8ee5f25d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.565464] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.725s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.568192] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.403s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.568446] env[62383]: DEBUG nova.objects.instance [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'pci_requests' on Instance uuid 0392d059-57ea-49fb-84d2-b71cbca840db {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.587516] env[62383]: INFO nova.scheduler.client.report [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleted allocations for instance de24aca8-30fc-453e-b192-b6bb115876ef [ 1131.603435] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1131.631109] env[62383]: INFO nova.compute.manager [-] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Took 1.78 seconds to deallocate network for instance. [ 1131.690873] env[62383]: DEBUG nova.compute.manager [req-eff2228c-b5c7-409e-971e-43331717c7c3 req-ef61e919-528c-4c00-b258-0d4436a8cc3b service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Received event network-vif-deleted-2a9eb423-4347-4116-825d-0afad0e10ad1 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1131.862844] env[62383]: DEBUG nova.compute.manager [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1131.889572] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1131.889811] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1131.889966] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1131.890169] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1131.890316] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1131.890462] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1131.890666] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1131.890827] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1131.890989] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 
tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1131.891172] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1131.891343] env[62383]: DEBUG nova.virt.hardware [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1131.892263] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97032fd-5eaa-4036-a68c-cc554b16281c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.900165] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33c7454-c31c-4d6d-998b-d7061a0afb60 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.992532] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524533e3-18c0-1020-a51e-285a8ee5f25d, 'name': SearchDatastore_Task, 'duration_secs': 0.017707} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.992812] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1131.993048] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1131.993354] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1131.993495] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.993670] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1131.993903] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e720209c-f806-4c6e-ac63-c8269291d5a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.002108] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1132.002108] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1132.002777] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-122c9ffb-4285-4810-b90c-8f5a5822dded {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.008077] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1132.008077] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]520efbe6-cb5a-e9e0-c2b3-3b9477e4f331" [ 1132.008077] env[62383]: _type = "Task" [ 1132.008077] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.015574] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520efbe6-cb5a-e9e0-c2b3-3b9477e4f331, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.072679] env[62383]: DEBUG nova.objects.instance [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'numa_topology' on Instance uuid 0392d059-57ea-49fb-84d2-b71cbca840db {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.096153] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8187d26f-459e-4f3b-81e7-3b3f8022f0e7 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "de24aca8-30fc-453e-b192-b6bb115876ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.427s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.137271] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.157895] env[62383]: DEBUG nova.compute.manager [req-d86086f8-94d7-4943-8ab1-92051077735b req-f4ead335-802e-4bc9-b1e9-a0855cd45c1a service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Received event network-vif-plugged-fbc86f1d-2da8-4092-baac-7867624b1100 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1132.158075] env[62383]: DEBUG oslo_concurrency.lockutils [req-d86086f8-94d7-4943-8ab1-92051077735b req-f4ead335-802e-4bc9-b1e9-a0855cd45c1a service nova] Acquiring lock "f7584d2c-5add-4764-9aed-22f7d1674854-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.158285] env[62383]: DEBUG oslo_concurrency.lockutils [req-d86086f8-94d7-4943-8ab1-92051077735b req-f4ead335-802e-4bc9-b1e9-a0855cd45c1a service nova] Lock "f7584d2c-5add-4764-9aed-22f7d1674854-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.158458] env[62383]: DEBUG oslo_concurrency.lockutils [req-d86086f8-94d7-4943-8ab1-92051077735b req-f4ead335-802e-4bc9-b1e9-a0855cd45c1a service nova] Lock "f7584d2c-5add-4764-9aed-22f7d1674854-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.158627] env[62383]: DEBUG nova.compute.manager [req-d86086f8-94d7-4943-8ab1-92051077735b req-f4ead335-802e-4bc9-b1e9-a0855cd45c1a service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] No waiting events found dispatching network-vif-plugged-fbc86f1d-2da8-4092-baac-7867624b1100 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1132.158791] env[62383]: WARNING nova.compute.manager [req-d86086f8-94d7-4943-8ab1-92051077735b req-f4ead335-802e-4bc9-b1e9-a0855cd45c1a service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Received unexpected event network-vif-plugged-fbc86f1d-2da8-4092-baac-7867624b1100 for instance with vm_state building and task_state spawning. [ 1132.237144] env[62383]: DEBUG nova.network.neutron [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Successfully updated port: fbc86f1d-2da8-4092-baac-7867624b1100 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1132.370829] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.371157] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.518159] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]520efbe6-cb5a-e9e0-c2b3-3b9477e4f331, 'name': SearchDatastore_Task, 'duration_secs': 0.008368} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.519012] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-071150ea-ed8c-40a2-9e6b-5f7e0f02666e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.523657] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1132.523657] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527d7608-995f-ebdb-c469-4ece4b733958" [ 1132.523657] env[62383]: _type = "Task" [ 1132.523657] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.530981] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527d7608-995f-ebdb-c469-4ece4b733958, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.575791] env[62383]: INFO nova.compute.claims [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1132.740272] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1132.740396] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.740574] env[62383]: DEBUG nova.network.neutron [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1132.879022] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.879022] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1133.036323] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527d7608-995f-ebdb-c469-4ece4b733958, 'name': SearchDatastore_Task, 'duration_secs': 0.011305} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.036604] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.036854] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 687912b8-40d2-4243-b31c-06107aa6cfb9/687912b8-40d2-4243-b31c-06107aa6cfb9.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1133.037146] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11361a3c-1205-4daa-b9bd-9c185d744b5f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.044256] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1133.044256] env[62383]: value = "task-2452421" [ 1133.044256] env[62383]: _type = "Task" [ 1133.044256] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.057728] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452421, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.274396] env[62383]: DEBUG nova.network.neutron [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1133.412073] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1133.412318] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquired lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.412414] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Forcefully refreshing network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1133.419952] env[62383]: DEBUG nova.network.neutron [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance_info_cache with network_info: [{"id": "fbc86f1d-2da8-4092-baac-7867624b1100", "address": "fa:16:3e:59:29:5c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbc86f1d-2d", "ovs_interfaceid": "fbc86f1d-2da8-4092-baac-7867624b1100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1133.556510] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452421, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487322} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.557760] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 687912b8-40d2-4243-b31c-06107aa6cfb9/687912b8-40d2-4243-b31c-06107aa6cfb9.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1133.557760] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1133.557760] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a06ad29-2dfe-453f-a5f0-4c03c0f2acdc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.564443] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1133.564443] env[62383]: value = "task-2452422" [ 1133.564443] env[62383]: _type = "Task" [ 1133.564443] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.572033] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452422, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.679414] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b922b9-9b92-44c1-9d58-481aee1226ea {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.687379] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b572775-7652-4bf8-8cd7-261cb50e2aa3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.717234] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b0b502-fb82-4a95-b9a4-8a25cb6f65a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.724342] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a2cc47-1857-4a05-a384-72cffb73c900 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.739171] env[62383]: DEBUG nova.compute.provider_tree [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.923768] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1133.924072] env[62383]: DEBUG nova.compute.manager [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Instance network_info: |[{"id": "fbc86f1d-2da8-4092-baac-7867624b1100", "address": "fa:16:3e:59:29:5c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbc86f1d-2d", "ovs_interfaceid": "fbc86f1d-2da8-4092-baac-7867624b1100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1133.924473] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None 
req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:29:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '63e45f61-1d9b-4660-8d25-89fb68d45cd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbc86f1d-2da8-4092-baac-7867624b1100', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1133.931811] env[62383]: DEBUG oslo.service.loopingcall [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1133.932015] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1133.932265] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae297e28-eed2-4698-8884-ac195a3d9c19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.953704] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1133.953704] env[62383]: value = "task-2452423" [ 1133.953704] env[62383]: _type = "Task" [ 1133.953704] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.961825] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452423, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.074987] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452422, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06972} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.076078] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1134.076321] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072e6bc4-84b8-4e98-9c6c-1d991d39a4db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.098497] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 687912b8-40d2-4243-b31c-06107aa6cfb9/687912b8-40d2-4243-b31c-06107aa6cfb9.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1134.098641] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-729641cb-2bee-416d-b09a-139097f60e85 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.118020] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1134.118020] env[62383]: value = "task-2452424" [ 1134.118020] env[62383]: _type = "Task" [ 1134.118020] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.125306] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452424, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.186671] env[62383]: DEBUG nova.compute.manager [req-2ae8e977-fb27-4f75-9c23-6a061419f5b8 req-e4e9a85f-05e5-4768-958e-302892ed0634 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Received event network-changed-fbc86f1d-2da8-4092-baac-7867624b1100 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1134.186839] env[62383]: DEBUG nova.compute.manager [req-2ae8e977-fb27-4f75-9c23-6a061419f5b8 req-e4e9a85f-05e5-4768-958e-302892ed0634 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Refreshing instance network info cache due to event network-changed-fbc86f1d-2da8-4092-baac-7867624b1100. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1134.187070] env[62383]: DEBUG oslo_concurrency.lockutils [req-2ae8e977-fb27-4f75-9c23-6a061419f5b8 req-e4e9a85f-05e5-4768-958e-302892ed0634 service nova] Acquiring lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.187216] env[62383]: DEBUG oslo_concurrency.lockutils [req-2ae8e977-fb27-4f75-9c23-6a061419f5b8 req-e4e9a85f-05e5-4768-958e-302892ed0634 service nova] Acquired lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.187382] env[62383]: DEBUG nova.network.neutron [req-2ae8e977-fb27-4f75-9c23-6a061419f5b8 req-e4e9a85f-05e5-4768-958e-302892ed0634 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Refreshing network info cache for port fbc86f1d-2da8-4092-baac-7867624b1100 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1134.243797] env[62383]: DEBUG nova.scheduler.client.report [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.338159] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "89033750-629f-4ddb-a309-56d50f798a8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1134.338479] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.466061] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452423, 'name': CreateVM_Task, 'duration_secs': 0.485498} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.466232] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1134.466940] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.467159] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.467477] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1134.467724] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5328eb52-3d43-4469-8843-208f960bbd2f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.472508] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1134.472508] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b950a0-987c-dbfd-af64-de7affd67a08" [ 1134.472508] env[62383]: _type = "Task" [ 1134.472508] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.479588] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b950a0-987c-dbfd-af64-de7affd67a08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.627293] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452424, 'name': ReconfigVM_Task, 'duration_secs': 0.343468} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.627561] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 687912b8-40d2-4243-b31c-06107aa6cfb9/687912b8-40d2-4243-b31c-06107aa6cfb9.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1134.628191] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7db6491-296f-426a-a413-31c0be9e798f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.633812] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1134.633812] env[62383]: value = "task-2452425" [ 1134.633812] env[62383]: _type = "Task" [ 1134.633812] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.638130] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Updating instance_info_cache with network_info: [{"id": "845110d3-620c-4852-8aab-e6907d5b3af2", "address": "fa:16:3e:0d:f6:13", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap845110d3-62", "ovs_interfaceid": "845110d3-620c-4852-8aab-e6907d5b3af2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.641780] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452425, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.748744] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.181s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.750835] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.614s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1134.751054] env[62383]: DEBUG nova.objects.instance [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'resources' on Instance uuid 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1134.783109] env[62383]: INFO nova.network.neutron [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating port e822f89d-516c-4eab-bd54-f1369994f514 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1134.840769] env[62383]: DEBUG nova.compute.manager [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1134.949785] env[62383]: DEBUG nova.network.neutron [req-2ae8e977-fb27-4f75-9c23-6a061419f5b8 req-e4e9a85f-05e5-4768-958e-302892ed0634 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updated VIF entry in instance network info cache for port fbc86f1d-2da8-4092-baac-7867624b1100. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1134.950456] env[62383]: DEBUG nova.network.neutron [req-2ae8e977-fb27-4f75-9c23-6a061419f5b8 req-e4e9a85f-05e5-4768-958e-302892ed0634 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance_info_cache with network_info: [{"id": "fbc86f1d-2da8-4092-baac-7867624b1100", "address": "fa:16:3e:59:29:5c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbc86f1d-2d", "ovs_interfaceid": "fbc86f1d-2da8-4092-baac-7867624b1100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.983531] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b950a0-987c-dbfd-af64-de7affd67a08, 'name': SearchDatastore_Task, 'duration_secs': 0.020563} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.983836] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1134.984076] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1134.984309] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.984462] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.984626] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1134.984880] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c603311-924d-4e45-bcd8-c286d5db4587 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.994947] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1134.995141] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1134.995840] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ced891b-e6ed-4ba3-8256-0230af39b684 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.000662] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1135.000662] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525c94a4-945b-509c-8401-a433123f9ec4" [ 1135.000662] env[62383]: _type = "Task" [ 1135.000662] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.008081] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525c94a4-945b-509c-8401-a433123f9ec4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.143814] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Releasing lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.144038] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Updated the network info_cache for instance {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1135.144281] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452425, 'name': Rename_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.144486] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.144673] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.144790] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.144925] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.368167] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.382041] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b934f8-8b84-49b7-8ad9-30dc494c9330 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.391075] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3a48e7-29b9-4935-ab59-4dfbf316190f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.424605] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc49880-d4fb-4540-977f-d50184e92946 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.432386] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3475b1f-7718-4be0-8f0d-811a08c05f03 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.447821] env[62383]: DEBUG nova.compute.provider_tree [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.453362] env[62383]: DEBUG oslo_concurrency.lockutils [req-2ae8e977-fb27-4f75-9c23-6a061419f5b8 req-e4e9a85f-05e5-4768-958e-302892ed0634 service nova] Releasing lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.513276] env[62383]: DEBUG 
oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]525c94a4-945b-509c-8401-a433123f9ec4, 'name': SearchDatastore_Task, 'duration_secs': 0.015039} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.513999] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c15390fe-8b24-43e7-b215-ecca4a58c6f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.519393] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1135.519393] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52664394-cb4b-b5f1-3366-e9f0331fa564" [ 1135.519393] env[62383]: _type = "Task" [ 1135.519393] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.527289] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52664394-cb4b-b5f1-3366-e9f0331fa564, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.644433] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452425, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.648381] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Getting list of instances from cluster (obj){ [ 1135.648381] env[62383]: value = "domain-c8" [ 1135.648381] env[62383]: _type = "ClusterComputeResource" [ 1135.648381] env[62383]: } {{(pid=62383) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1135.649306] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ccf2f7-8cc1-451c-aaa8-44bafaced19b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.663730] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Got total of 5 instances {{(pid=62383) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1135.663909] env[62383]: WARNING nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] While synchronizing instance power states, found 7 instances in the database and 5 instances on the hypervisor. 
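The periodic-task lines above ("Running periodic task ComputeManager._sync_power_states", followed by the warning that 7 instances exist in the database but only 5 on the hypervisor) come from nova-compute's oslo.service periodic task machinery. The following is a minimal sketch of that pattern, assuming hypothetical db_api/driver helpers and an arbitrary spacing value; it illustrates the mechanism only and is not Nova's actual manager code.

    import logging

    from oslo_config import cfg
    from oslo_service import periodic_task

    LOG = logging.getLogger(__name__)
    CONF = cfg.CONF


    class ToyComputeManager(periodic_task.PeriodicTasks):
        """Toy manager registering one periodic task (illustrative only)."""

        def __init__(self, db_api, driver):
            super().__init__(CONF)
            self.db_api = db_api   # hypothetical helper: instance UUIDs in the DB
            self.driver = driver   # hypothetical helper: instance UUIDs on the hypervisor

        @periodic_task.periodic_task(spacing=600)
        def _sync_power_states(self, context):
            # Compare what the database believes exists with what the hypervisor
            # reports -- the same shape of check that produced the "found 7
            # instances in the database and 5 instances on the hypervisor"
            # warning above.
            db_uuids = set(self.db_api.list_instance_uuids(context))
            hv_uuids = set(self.driver.list_instance_uuids())
            if db_uuids != hv_uuids:
                LOG.warning('While synchronizing instance power states, found '
                            '%d instances in the database and %d instances on '
                            'the hypervisor.', len(db_uuids), len(hv_uuids))
            for uuid in db_uuids:
                LOG.debug('Triggering sync for uuid %s', uuid)
                # per-instance reconciliation (under a per-UUID lock) would go here

A service loop drives such tasks by calling run_periodic_tasks(context) on the manager, which is what emits the "Running periodic task ..." DEBUG lines seen throughout this log.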
[ 1135.664065] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Triggering sync for uuid 1b025655-acad-4b70-9e1a-489683cafb7e {{(pid=62383) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1135.664274] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Triggering sync for uuid ec7c648d-10b0-480a-a5f0-4dab08d0049e {{(pid=62383) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1135.664442] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Triggering sync for uuid 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 {{(pid=62383) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1135.664603] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Triggering sync for uuid 0392d059-57ea-49fb-84d2-b71cbca840db {{(pid=62383) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1135.664758] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Triggering sync for uuid c94e9a83-04de-4144-ab6e-d96dc7c39e6d {{(pid=62383) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1135.664909] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Triggering sync for uuid 687912b8-40d2-4243-b31c-06107aa6cfb9 {{(pid=62383) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1135.665080] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Triggering sync for uuid f7584d2c-5add-4764-9aed-22f7d1674854 {{(pid=62383) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10855}} [ 1135.665392] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "1b025655-acad-4b70-9e1a-489683cafb7e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.665619] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "1b025655-acad-4b70-9e1a-489683cafb7e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.665890] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.666088] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.666337] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.666557] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "0392d059-57ea-49fb-84d2-b71cbca840db" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.666769] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.666949] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.667204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "687912b8-40d2-4243-b31c-06107aa6cfb9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.667426] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "f7584d2c-5add-4764-9aed-22f7d1674854" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1135.667626] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1135.667760] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1135.668434] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1584500-71c2-46a5-a660-9f6576fec0e0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.671145] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f92b44e-ce9b-4714-8db4-c96246fd098c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.673967] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd5e4d3-9fe2-46f0-b1a5-2e974c1c31e2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.952024] env[62383]: DEBUG nova.scheduler.client.report [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1136.032015] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52664394-cb4b-b5f1-3366-e9f0331fa564, 'name': SearchDatastore_Task, 'duration_secs': 0.010224} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.032353] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.032617] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] f7584d2c-5add-4764-9aed-22f7d1674854/f7584d2c-5add-4764-9aed-22f7d1674854.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1136.032889] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f2d9436-271d-455e-a5f8-7f4c1c757bc4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.038883] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1136.038883] env[62383]: value = "task-2452427" [ 1136.038883] env[62383]: _type = "Task" [ 1136.038883] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.046338] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452427, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.145622] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452425, 'name': Rename_Task, 'duration_secs': 1.243583} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.145918] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1136.146186] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3ea6466b-1b21-4d46-8ff1-73871497878e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.151545] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1136.151545] env[62383]: value = "task-2452428" [ 1136.151545] env[62383]: _type = "Task" [ 1136.151545] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.159207] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452428, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.183490] env[62383]: DEBUG nova.compute.manager [req-24f2035e-fa6c-49be-8298-85403640339f req-b7638fbd-c09e-419d-84be-73ba65053d37 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received event network-vif-plugged-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1136.183916] env[62383]: DEBUG oslo_concurrency.lockutils [req-24f2035e-fa6c-49be-8298-85403640339f req-b7638fbd-c09e-419d-84be-73ba65053d37 service nova] Acquiring lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.184322] env[62383]: DEBUG oslo_concurrency.lockutils [req-24f2035e-fa6c-49be-8298-85403640339f req-b7638fbd-c09e-419d-84be-73ba65053d37 service nova] Lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.184659] env[62383]: DEBUG oslo_concurrency.lockutils [req-24f2035e-fa6c-49be-8298-85403640339f req-b7638fbd-c09e-419d-84be-73ba65053d37 service nova] Lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.184971] env[62383]: DEBUG nova.compute.manager [req-24f2035e-fa6c-49be-8298-85403640339f req-b7638fbd-c09e-419d-84be-73ba65053d37 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] No waiting events found dispatching network-vif-plugged-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1136.185740] env[62383]: WARNING nova.compute.manager [req-24f2035e-fa6c-49be-8298-85403640339f req-b7638fbd-c09e-419d-84be-73ba65053d37 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received unexpected event network-vif-plugged-e822f89d-516c-4eab-bd54-f1369994f514 for instance with vm_state shelved_offloaded and task_state spawning. 
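The lock traffic in this section ("Acquiring lock ... by ...", "acquired ... waited 0.000s", '"released" ... held 0.000s') is oslo.concurrency's lockutils module, used here for per-instance event locks ("<uuid>-events"), network-cache locks ("refresh_cache-<uuid>") and the resource tracker's "compute_resources" lock. The sketch below shows only the generic locking pattern those DEBUG lines reflect; the function names merely echo the log and the bodies are placeholders, not Nova's implementation.

    from oslo_concurrency import lockutils


    def pop_instance_event(instance_uuid, event_name):
        # Mirrors the '<uuid>-events' lock taken while popping a waiter for an
        # external event such as network-vif-plugged. Both the context-manager
        # and decorator forms of lockutils emit the Acquiring/acquired/released
        # DEBUG lines seen above.
        with lockutils.lock('%s-events' % instance_uuid):
            # look up and remove any waiter registered for event_name here
            return None


    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # Mirrors the 'compute_resources' lock held while claiming CPU/RAM/disk
        # for a new instance (see the "Claim successful" lines above).
        # Placeholder body.
        pass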
[ 1136.188360] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.521s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.188360] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "1b025655-acad-4b70-9e1a-489683cafb7e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.522s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.188360] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.521s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.273366] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.273631] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.273856] env[62383]: DEBUG nova.network.neutron [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1136.456675] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.706s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.459216] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.091s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.460943] env[62383]: INFO nova.compute.claims [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1136.478962] env[62383]: INFO 
nova.scheduler.client.report [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleted allocations for instance 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 [ 1136.551119] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452427, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458821} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.551348] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] f7584d2c-5add-4764-9aed-22f7d1674854/f7584d2c-5add-4764-9aed-22f7d1674854.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1136.551600] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1136.551890] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f032d0dc-bfb7-41cb-9334-c935665e0b1c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.558102] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1136.558102] env[62383]: value = "task-2452429" [ 1136.558102] env[62383]: _type = "Task" [ 1136.558102] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.565683] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452429, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.662922] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452428, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.986291] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f46848-5a4e-4fe9-8de2-64b972e9afa0 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.259s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1136.987164] env[62383]: DEBUG oslo_concurrency.lockutils [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] Acquired lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.988166] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5430e05c-a3e7-4299-8f13-de29f8382217 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.998239] env[62383]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1136.998471] env[62383]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62383) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1137.001770] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6395b18-a610-4cc9-a43d-c23750706fe1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.011909] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5644fe-5be2-44b1-9f3f-e761e7e7e52c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.047551] env[62383]: ERROR root [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-496579' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = 
get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-496579' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-496579' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-496579'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-496579' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-496579' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-496579'}\n"]: nova.exception.InstanceNotFound: Instance 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 could not be found. [ 1137.047838] env[62383]: DEBUG oslo_concurrency.lockutils [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] Releasing lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.048093] env[62383]: DEBUG nova.compute.manager [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Detach interface failed, port_id=fb190575-295c-42fa-b8ba-fc6f19ccfa01, reason: Instance 8d9d6f3b-aef7-478a-a43e-3b621f1b3845 could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1137.048328] env[62383]: DEBUG nova.compute.manager [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Received event network-vif-deleted-5ba29557-a079-4404-9449-eeff24a0a3e4 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1137.048544] env[62383]: INFO nova.compute.manager [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Neutron deleted interface 5ba29557-a079-4404-9449-eeff24a0a3e4; detaching it from the instance and deleting it from the info cache [ 1137.048794] env[62383]: DEBUG nova.network.neutron [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.050182] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.384s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.051060] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b61cc06-1d78-48f1-8fa0-9c6d840f2376 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.060140] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0938718-87bd-4fde-b6e1-b6c0d9ee0591 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.084385] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452429, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067525} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.084385] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1137.084680] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65e12e4-762f-4a0f-82e2-580635cb9186 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.117939] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] f7584d2c-5add-4764-9aed-22f7d1674854/f7584d2c-5add-4764-9aed-22f7d1674854.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1137.118664] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-221d32a8-96ac-497c-b57a-9bb9ebf8bb78 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.135096] env[62383]: DEBUG nova.network.neutron [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating instance_info_cache with network_info: [{"id": "e822f89d-516c-4eab-bd54-f1369994f514", "address": "fa:16:3e:70:0a:80", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape822f89d-51", "ovs_interfaceid": "e822f89d-516c-4eab-bd54-f1369994f514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.142429] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1137.142429] env[62383]: value = "task-2452431" [ 1137.142429] env[62383]: _type = "Task" [ 1137.142429] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.151395] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452431, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.161897] env[62383]: DEBUG oslo_vmware.api [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452428, 'name': PowerOnVM_Task, 'duration_secs': 0.627348} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.162149] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1137.162406] env[62383]: INFO nova.compute.manager [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Took 9.71 seconds to spawn the instance on the hypervisor. [ 1137.162629] env[62383]: DEBUG nova.compute.manager [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1137.163453] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43122bb-86e1-45ec-bdd3-c2a4aad586c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.554715] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9532e9b2-2bb3-4309-ba1c-71ea59277d3f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.561704] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026cfe37-765e-4391-9901-c920b7d91021 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.588154] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a74ab46-a2ec-4c90-9043-f257898747d4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.604521] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff54adb7-8e86-4ce6-bfab-90d29f5e8b71 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.607924] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "8d9d6f3b-aef7-478a-a43e-3b621f1b3845" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.558s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.608308] env[62383]: DEBUG nova.compute.manager [req-daf7d302-8ffb-4af5-b959-5e5c092adf72 req-8269f5b2-7b25-4f25-af0c-15d9ffcc10b9 service nova] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Detach interface failed, port_id=5ba29557-a079-4404-9449-eeff24a0a3e4, reason: Instance 2f028680-8db4-474a-8f24-880c4702877b could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1137.635322] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9716cc2-61ea-4909-a9d4-c0df9ff8f376 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.638217] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.649073] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098facf7-ac91-4f56-a267-f454c522f850 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.657429] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452431, 'name': ReconfigVM_Task, 'duration_secs': 0.283783} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.665195] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Reconfigured VM instance instance-00000070 to attach disk [datastore2] f7584d2c-5add-4764-9aed-22f7d1674854/f7584d2c-5add-4764-9aed-22f7d1674854.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1137.666074] env[62383]: DEBUG nova.compute.provider_tree [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.669028] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e6fb0d81-3f50-4bdb-8558-9c25176307f5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.678930] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and 
image_meta ImageMeta(checksum='c1bcd82d6b3218fc217eabb68de88b42',container_format='bare',created_at=2025-02-11T15:33:48Z,direct_url=,disk_format='vmdk',id=cc264ce0-56c7-485b-8b5f-25bd2cbf6a47,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-396070697-shelved',owner='2439f3d802f34027b12d50f242a54ba3',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2025-02-11T15:34:03Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1137.679169] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1137.679322] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1137.679505] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1137.679651] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1137.679796] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1137.679996] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1137.680176] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1137.680389] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1137.680535] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 1137.680712] env[62383]: DEBUG nova.virt.hardware [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1137.682548] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3452791e-ffc6-4118-8772-f59e2f03d815 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.686666] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1137.686666] env[62383]: value = "task-2452432" [ 1137.686666] env[62383]: _type = "Task" [ 1137.686666] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.690384] env[62383]: INFO nova.compute.manager [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Took 14.47 seconds to build instance. [ 1137.703843] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905af7c7-adf4-44b5-83c8-8f669d1b466f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.706962] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452432, 'name': Rename_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.720805] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:0a:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '07e9bef1-2b0e-4e4d-997f-de71bb0e213a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e822f89d-516c-4eab-bd54-f1369994f514', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1137.728711] env[62383]: DEBUG oslo.service.loopingcall [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1137.729262] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1137.729494] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05ad2c1e-f9bd-4454-a12f-09da5c459fa8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.750530] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1137.750530] env[62383]: value = "task-2452433" [ 1137.750530] env[62383]: _type = "Task" [ 1137.750530] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.757823] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452433, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.172741] env[62383]: DEBUG nova.scheduler.client.report [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1138.200094] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8fc8b2cc-ecc0-41ec-9b3d-65eba3473de8 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "687912b8-40d2-4243-b31c-06107aa6cfb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.990s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.200548] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452432, 'name': Rename_Task, 'duration_secs': 0.16204} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.201105] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "687912b8-40d2-4243-b31c-06107aa6cfb9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.534s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.201296] env[62383]: INFO nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1138.201472] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "687912b8-40d2-4243-b31c-06107aa6cfb9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.201691] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1138.202453] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdafb63c-32d3-4ffa-980d-afa58d7180b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.206431] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9652406-4394-4992-b843-6623d120e2c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.208978] env[62383]: DEBUG nova.compute.manager [req-4c259817-14c9-4b9e-86a0-93979af89732 req-7126cd4a-9e1b-4600-aef5-f11504568383 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received event network-changed-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1138.209151] env[62383]: DEBUG nova.compute.manager [req-4c259817-14c9-4b9e-86a0-93979af89732 req-7126cd4a-9e1b-4600-aef5-f11504568383 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Refreshing instance network info cache due to event network-changed-e822f89d-516c-4eab-bd54-f1369994f514. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1138.209357] env[62383]: DEBUG oslo_concurrency.lockutils [req-4c259817-14c9-4b9e-86a0-93979af89732 req-7126cd4a-9e1b-4600-aef5-f11504568383 service nova] Acquiring lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.209539] env[62383]: DEBUG oslo_concurrency.lockutils [req-4c259817-14c9-4b9e-86a0-93979af89732 req-7126cd4a-9e1b-4600-aef5-f11504568383 service nova] Acquired lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.209655] env[62383]: DEBUG nova.network.neutron [req-4c259817-14c9-4b9e-86a0-93979af89732 req-7126cd4a-9e1b-4600-aef5-f11504568383 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Refreshing network info cache for port e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1138.214741] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dad6ba18-c3a6-4289-8e3e-a6667d4de493 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Suspending the VM {{(pid=62383) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1138.214955] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-43991918-fd70-431a-bb31-a7f4cdf3a45e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.217677] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1138.217677] env[62383]: value = "task-2452434" [ 1138.217677] env[62383]: _type = "Task" [ 1138.217677] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.224103] env[62383]: DEBUG oslo_vmware.api [None req-dad6ba18-c3a6-4289-8e3e-a6667d4de493 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1138.224103] env[62383]: value = "task-2452435" [ 1138.224103] env[62383]: _type = "Task" [ 1138.224103] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.226902] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452434, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.234431] env[62383]: DEBUG oslo_vmware.api [None req-dad6ba18-c3a6-4289-8e3e-a6667d4de493 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452435, 'name': SuspendVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.259676] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452433, 'name': CreateVM_Task, 'duration_secs': 0.33329} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.259848] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1138.260557] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.260716] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.261102] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1138.261350] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-819bc2ff-8b66-4642-84e8-4d41ec599273 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.265679] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1138.265679] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524897c4-f340-a910-e506-d28e23d8d1b2" [ 1138.265679] env[62383]: _type = "Task" [ 1138.265679] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.273133] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524897c4-f340-a910-e506-d28e23d8d1b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.678677] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.219s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.679927] env[62383]: DEBUG nova.compute.manager [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1138.729235] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452434, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.738089] env[62383]: DEBUG oslo_vmware.api [None req-dad6ba18-c3a6-4289-8e3e-a6667d4de493 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452435, 'name': SuspendVM_Task} progress is 70%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.781331] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.781601] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Processing image cc264ce0-56c7-485b-8b5f-25bd2cbf6a47 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1138.781902] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1138.782017] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.782213] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1138.782504] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab158d7a-c550-4132-a209-4baecd3d2647 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.791625] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1138.792460] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 
tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1138.792663] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-360bc593-3004-4fd2-a250-929d3b81f9b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.798701] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1138.798701] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52faec0f-ceeb-c456-01bc-612b6a18313f" [ 1138.798701] env[62383]: _type = "Task" [ 1138.798701] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.806499] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52faec0f-ceeb-c456-01bc-612b6a18313f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.980302] env[62383]: DEBUG nova.network.neutron [req-4c259817-14c9-4b9e-86a0-93979af89732 req-7126cd4a-9e1b-4600-aef5-f11504568383 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updated VIF entry in instance network info cache for port e822f89d-516c-4eab-bd54-f1369994f514. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1138.980302] env[62383]: DEBUG nova.network.neutron [req-4c259817-14c9-4b9e-86a0-93979af89732 req-7126cd4a-9e1b-4600-aef5-f11504568383 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating instance_info_cache with network_info: [{"id": "e822f89d-516c-4eab-bd54-f1369994f514", "address": "fa:16:3e:70:0a:80", "network": {"id": "257ad952-98f7-4f9d-803a-ebc613572141", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-540006814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2439f3d802f34027b12d50f242a54ba3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "07e9bef1-2b0e-4e4d-997f-de71bb0e213a", "external-id": "nsx-vlan-transportzone-786", "segmentation_id": 786, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape822f89d-51", "ovs_interfaceid": "e822f89d-516c-4eab-bd54-f1369994f514", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.133572] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.133852] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.185953] env[62383]: DEBUG nova.compute.utils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1139.187922] env[62383]: DEBUG nova.compute.manager [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1139.188150] env[62383]: DEBUG nova.network.neutron [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1139.225789] env[62383]: DEBUG nova.policy [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c294a0cc4e6446afabfb754ba2437a65', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83304cfb5deb443880252c194e249565', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1139.230531] env[62383]: DEBUG oslo_vmware.api [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452434, 'name': PowerOnVM_Task, 'duration_secs': 0.607611} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.233354] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1139.233553] env[62383]: INFO nova.compute.manager [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Took 7.37 seconds to spawn the instance on the hypervisor. [ 1139.233729] env[62383]: DEBUG nova.compute.manager [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1139.234457] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db32add5-852b-4633-9999-40df8e249e7e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.241537] env[62383]: DEBUG oslo_vmware.api [None req-dad6ba18-c3a6-4289-8e3e-a6667d4de493 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452435, 'name': SuspendVM_Task, 'duration_secs': 0.847331} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.243087] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dad6ba18-c3a6-4289-8e3e-a6667d4de493 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Suspended the VM {{(pid=62383) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1139.243386] env[62383]: DEBUG nova.compute.manager [None req-dad6ba18-c3a6-4289-8e3e-a6667d4de493 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1139.247633] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c71c1d0-8523-45cf-9b07-dc98cfb718a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.309063] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Preparing fetch location {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1139.309325] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Fetch image to [datastore2] OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5/OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5.vmdk {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1139.309533] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Downloading stream optimized image cc264ce0-56c7-485b-8b5f-25bd2cbf6a47 to [datastore2] OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5/OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5.vmdk on the data store datastore2 as vApp {{(pid=62383) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1139.309707] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Downloading image file data cc264ce0-56c7-485b-8b5f-25bd2cbf6a47 to the ESX as VM named 'OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5' {{(pid=62383) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1139.383330] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1139.383330] env[62383]: value = "resgroup-9" [ 1139.383330] env[62383]: _type = "ResourcePool" [ 1139.383330] env[62383]: }. 
{{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1139.383645] env[62383]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-61678938-4611-40b3-bfcc-f57da7038d9c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.407136] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lease: (returnval){ [ 1139.407136] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a69c53-c849-8848-1ebc-f266aa92df01" [ 1139.407136] env[62383]: _type = "HttpNfcLease" [ 1139.407136] env[62383]: } obtained for vApp import into resource pool (val){ [ 1139.407136] env[62383]: value = "resgroup-9" [ 1139.407136] env[62383]: _type = "ResourcePool" [ 1139.407136] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1139.407404] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the lease: (returnval){ [ 1139.407404] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a69c53-c849-8848-1ebc-f266aa92df01" [ 1139.407404] env[62383]: _type = "HttpNfcLease" [ 1139.407404] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1139.413530] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1139.413530] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a69c53-c849-8848-1ebc-f266aa92df01" [ 1139.413530] env[62383]: _type = "HttpNfcLease" [ 1139.413530] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1139.480784] env[62383]: DEBUG oslo_concurrency.lockutils [req-4c259817-14c9-4b9e-86a0-93979af89732 req-7126cd4a-9e1b-4600-aef5-f11504568383 service nova] Releasing lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1139.504437] env[62383]: DEBUG nova.network.neutron [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Successfully created port: 158b7402-2cbc-46b2-a789-ada2ac1b29cd {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1139.638680] env[62383]: DEBUG nova.compute.manager [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1139.688569] env[62383]: DEBUG nova.compute.manager [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1139.766558] env[62383]: INFO nova.compute.manager [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Took 15.16 seconds to build instance. [ 1139.915989] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1139.915989] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a69c53-c849-8848-1ebc-f266aa92df01" [ 1139.915989] env[62383]: _type = "HttpNfcLease" [ 1139.915989] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1140.047169] env[62383]: DEBUG nova.compute.manager [req-d7879cf6-0e42-425f-94a3-6171b6ef1060 req-c35fae62-e711-47b5-8392-a43899b92881 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Received event network-changed-fbc86f1d-2da8-4092-baac-7867624b1100 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1140.047270] env[62383]: DEBUG nova.compute.manager [req-d7879cf6-0e42-425f-94a3-6171b6ef1060 req-c35fae62-e711-47b5-8392-a43899b92881 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Refreshing instance network info cache due to event network-changed-fbc86f1d-2da8-4092-baac-7867624b1100. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1140.047770] env[62383]: DEBUG oslo_concurrency.lockutils [req-d7879cf6-0e42-425f-94a3-6171b6ef1060 req-c35fae62-e711-47b5-8392-a43899b92881 service nova] Acquiring lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.047770] env[62383]: DEBUG oslo_concurrency.lockutils [req-d7879cf6-0e42-425f-94a3-6171b6ef1060 req-c35fae62-e711-47b5-8392-a43899b92881 service nova] Acquired lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.047883] env[62383]: DEBUG nova.network.neutron [req-d7879cf6-0e42-425f-94a3-6171b6ef1060 req-c35fae62-e711-47b5-8392-a43899b92881 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Refreshing network info cache for port fbc86f1d-2da8-4092-baac-7867624b1100 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1140.165070] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.165345] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.169013] env[62383]: INFO nova.compute.claims [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 
tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1140.270794] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24747460-e51a-4449-b962-8e2f973661f8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "f7584d2c-5add-4764-9aed-22f7d1674854" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.676s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.270794] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "f7584d2c-5add-4764-9aed-22f7d1674854" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.603s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.270794] env[62383]: INFO nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] During sync_power_state the instance has a pending task (spawning). Skip. [ 1140.270794] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "f7584d2c-5add-4764-9aed-22f7d1674854" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.417114] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1140.417114] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a69c53-c849-8848-1ebc-f266aa92df01" [ 1140.417114] env[62383]: _type = "HttpNfcLease" [ 1140.417114] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1140.417410] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1140.417410] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a69c53-c849-8848-1ebc-f266aa92df01" [ 1140.417410] env[62383]: _type = "HttpNfcLease" [ 1140.417410] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1140.418147] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec94f7f5-34b3-44a9-9cb6-a3af9392e40e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.425739] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527bd524-ffca-89fb-4cb4-08c05b086f62/disk-0.vmdk from lease info. 
{{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1140.425914] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527bd524-ffca-89fb-4cb4-08c05b086f62/disk-0.vmdk. {{(pid=62383) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1140.495621] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ac26f9e0-25fb-4a44-8def-9f1b7a8e5656 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.640466] env[62383]: DEBUG oslo_concurrency.lockutils [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "687912b8-40d2-4243-b31c-06107aa6cfb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.641030] env[62383]: DEBUG oslo_concurrency.lockutils [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "687912b8-40d2-4243-b31c-06107aa6cfb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.641030] env[62383]: DEBUG oslo_concurrency.lockutils [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "687912b8-40d2-4243-b31c-06107aa6cfb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.641194] env[62383]: DEBUG oslo_concurrency.lockutils [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "687912b8-40d2-4243-b31c-06107aa6cfb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.641261] env[62383]: DEBUG oslo_concurrency.lockutils [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "687912b8-40d2-4243-b31c-06107aa6cfb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.643467] env[62383]: INFO nova.compute.manager [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Terminating instance [ 1140.698256] env[62383]: DEBUG nova.compute.manager [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 
tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1140.728178] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1140.728447] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1140.728638] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1140.728839] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1140.728970] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1140.729142] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1140.729366] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1140.729538] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 
tempest-AttachVolumeNegativeTest-1609342862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1140.729715] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1140.729879] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1140.730077] env[62383]: DEBUG nova.virt.hardware [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1140.731014] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10ec2eb-2c6c-452d-8274-18b8336fdd21 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.744313] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c424494-0426-4627-b3e2-203d4d41fa44 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.802806] env[62383]: DEBUG nova.network.neutron [req-d7879cf6-0e42-425f-94a3-6171b6ef1060 req-c35fae62-e711-47b5-8392-a43899b92881 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updated VIF entry in instance network info cache for port fbc86f1d-2da8-4092-baac-7867624b1100. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1140.803223] env[62383]: DEBUG nova.network.neutron [req-d7879cf6-0e42-425f-94a3-6171b6ef1060 req-c35fae62-e711-47b5-8392-a43899b92881 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance_info_cache with network_info: [{"id": "fbc86f1d-2da8-4092-baac-7867624b1100", "address": "fa:16:3e:59:29:5c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbc86f1d-2d", "ovs_interfaceid": "fbc86f1d-2da8-4092-baac-7867624b1100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.881753] env[62383]: DEBUG nova.compute.manager [req-dc34cf3d-4252-4214-8fe9-79a589021ed9 req-1d64d096-eefc-42e9-ac86-bb51de6c51a3 service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Received event network-vif-plugged-158b7402-2cbc-46b2-a789-ada2ac1b29cd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1140.882166] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc34cf3d-4252-4214-8fe9-79a589021ed9 req-1d64d096-eefc-42e9-ac86-bb51de6c51a3 service nova] Acquiring lock "89033750-629f-4ddb-a309-56d50f798a8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.882413] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc34cf3d-4252-4214-8fe9-79a589021ed9 req-1d64d096-eefc-42e9-ac86-bb51de6c51a3 service nova] Lock "89033750-629f-4ddb-a309-56d50f798a8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.882521] env[62383]: DEBUG oslo_concurrency.lockutils [req-dc34cf3d-4252-4214-8fe9-79a589021ed9 req-1d64d096-eefc-42e9-ac86-bb51de6c51a3 service nova] Lock "89033750-629f-4ddb-a309-56d50f798a8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.882726] env[62383]: DEBUG nova.compute.manager [req-dc34cf3d-4252-4214-8fe9-79a589021ed9 req-1d64d096-eefc-42e9-ac86-bb51de6c51a3 service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] No waiting events found dispatching network-vif-plugged-158b7402-2cbc-46b2-a789-ada2ac1b29cd 
{{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1140.882954] env[62383]: WARNING nova.compute.manager [req-dc34cf3d-4252-4214-8fe9-79a589021ed9 req-1d64d096-eefc-42e9-ac86-bb51de6c51a3 service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Received unexpected event network-vif-plugged-158b7402-2cbc-46b2-a789-ada2ac1b29cd for instance with vm_state building and task_state spawning. [ 1140.979556] env[62383]: DEBUG nova.network.neutron [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Successfully updated port: 158b7402-2cbc-46b2-a789-ada2ac1b29cd {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1141.148418] env[62383]: DEBUG nova.compute.manager [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1141.148418] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1141.148633] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4496a61-5b40-4d4a-b609-dd52ea86e9c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.156507] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1141.156772] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b0a6026-b31e-48a5-b515-8ec6714dcdab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.224920] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1141.225131] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1141.225355] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleting the datastore file [datastore2] 687912b8-40d2-4243-b31c-06107aa6cfb9 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1141.229884] 
env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c728d311-9c3f-4550-91a9-19ce8ee48067 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.242155] env[62383]: DEBUG oslo_vmware.api [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1141.242155] env[62383]: value = "task-2452440" [ 1141.242155] env[62383]: _type = "Task" [ 1141.242155] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.254601] env[62383]: DEBUG oslo_vmware.api [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452440, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.306052] env[62383]: DEBUG oslo_concurrency.lockutils [req-d7879cf6-0e42-425f-94a3-6171b6ef1060 req-c35fae62-e711-47b5-8392-a43899b92881 service nova] Releasing lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.309504] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598f9f33-33db-40dd-9622-2f9a98810111 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.317280] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5368a301-ba31-40de-a673-1335f80f44ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.360440] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be3a117-65f1-451a-a5e3-4b6ec0a7b12d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.371810] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb149d0-5084-4d31-beec-8a09c4e0eab9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.388206] env[62383]: DEBUG nova.compute.provider_tree [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1141.483809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "refresh_cache-89033750-629f-4ddb-a309-56d50f798a8d" {{(pid=62383) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.483988] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "refresh_cache-89033750-629f-4ddb-a309-56d50f798a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.484179] env[62383]: DEBUG nova.network.neutron [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1141.741595] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Completed reading data from the image iterator. {{(pid=62383) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1141.741915] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527bd524-ffca-89fb-4cb4-08c05b086f62/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1141.742994] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b158fe-b707-4a59-9908-1ba7a06210a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.752911] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527bd524-ffca-89fb-4cb4-08c05b086f62/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1141.753167] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527bd524-ffca-89fb-4cb4-08c05b086f62/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1141.756337] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-b566fcbe-c3c2-4421-96b1-375ec8e95bbc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.757784] env[62383]: DEBUG oslo_vmware.api [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452440, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226041} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.757988] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1141.758225] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1141.758411] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1141.758607] env[62383]: INFO nova.compute.manager [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1141.758879] env[62383]: DEBUG oslo.service.loopingcall [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1141.759414] env[62383]: DEBUG nova.compute.manager [-] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1141.759537] env[62383]: DEBUG nova.network.neutron [-] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1141.918590] env[62383]: ERROR nova.scheduler.client.report [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [req-242e25c0-012a-4145-b032-caceaf3a4090] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-242e25c0-012a-4145-b032-caceaf3a4090"}]} [ 1141.937496] env[62383]: DEBUG oslo_vmware.rw_handles [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527bd524-ffca-89fb-4cb4-08c05b086f62/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1141.937790] env[62383]: INFO nova.virt.vmwareapi.images [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Downloaded image file data cc264ce0-56c7-485b-8b5f-25bd2cbf6a47 [ 1141.938742] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c02212-809c-4ec4-93e9-0737556bd541 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.943365] env[62383]: DEBUG nova.scheduler.client.report [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1141.965782] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7565225d-13a5-48ac-b7e6-1717e81fa7f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.974537] env[62383]: DEBUG nova.scheduler.client.report [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1141.974881] env[62383]: DEBUG nova.compute.provider_tree [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1141.985176] env[62383]: INFO nova.virt.vmwareapi.images [None 
req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] The imported VM was unregistered [ 1141.988013] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Caching image {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1141.988355] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Creating directory with path [datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1141.990522] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1092da8e-2e4e-4511-9162-d91ba0893d20 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.996792] env[62383]: DEBUG nova.scheduler.client.report [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1142.001565] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Created directory with path [datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47 {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1142.001784] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5/OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5.vmdk to [datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47.vmdk. {{(pid=62383) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1142.002054] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c5a9a7c9-0008-4f8b-a914-b4da501156f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.009781] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1142.009781] env[62383]: value = "task-2452442" [ 1142.009781] env[62383]: _type = "Task" [ 1142.009781] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.017527] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452442, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.018452] env[62383]: DEBUG nova.scheduler.client.report [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1142.021244] env[62383]: DEBUG nova.network.neutron [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1142.084419] env[62383]: DEBUG oslo_concurrency.lockutils [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.084419] env[62383]: DEBUG oslo_concurrency.lockutils [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.144929] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f150431-0066-43f4-be31-d665d05e9e01 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.152125] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124a7422-c3e5-4f75-90fa-642a0334221e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.186921] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1190d49-4a58-448a-a486-43c0dea8c015 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.191174] env[62383]: DEBUG nova.network.neutron [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Updating instance_info_cache with network_info: [{"id": "158b7402-2cbc-46b2-a789-ada2ac1b29cd", "address": "fa:16:3e:0f:74:4f", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", 
"bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap158b7402-2c", "ovs_interfaceid": "158b7402-2cbc-46b2-a789-ada2ac1b29cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.198588] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ab0008-e9a8-4b86-9887-283ef827fb0f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.215318] env[62383]: DEBUG nova.compute.provider_tree [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1142.522708] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452442, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.585456] env[62383]: DEBUG nova.network.neutron [-] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.588924] env[62383]: INFO nova.compute.manager [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Detaching volume d01d3eba-40b0-4856-9209-d6e56c81b4a7 [ 1142.628309] env[62383]: INFO nova.virt.block_device [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Attempting to driver detach volume d01d3eba-40b0-4856-9209-d6e56c81b4a7 from mountpoint /dev/sdb [ 1142.628648] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1142.629047] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496589', 'volume_id': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7', 'name': 'volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'ec7c648d-10b0-480a-a5f0-4dab08d0049e', 'attached_at': '', 'detached_at': '', 'volume_id': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7', 'serial': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1142.630078] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb652f9-ddcf-4c6f-add0-575039517ab3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.654177] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e2bde1-5359-4afd-a3a7-9eb8cbb51ac6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.663766] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83db5bb-225b-45c7-9579-d7f6589394ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.685730] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e98f1a-3ee4-4d8e-b789-f165a96b27ab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.703111] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 
tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "refresh_cache-89033750-629f-4ddb-a309-56d50f798a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.703460] env[62383]: DEBUG nova.compute.manager [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Instance network_info: |[{"id": "158b7402-2cbc-46b2-a789-ada2ac1b29cd", "address": "fa:16:3e:0f:74:4f", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap158b7402-2c", "ovs_interfaceid": "158b7402-2cbc-46b2-a789-ada2ac1b29cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1142.703814] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] The volume has not been displaced from its original location: [datastore2] volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7/volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1142.709549] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1142.709723] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:74:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '01fe2e08-46f6-4cee-aefd-934461f8077d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '158b7402-2cbc-46b2-a789-ada2ac1b29cd', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1142.717068] env[62383]: DEBUG oslo.service.loopingcall [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1142.717306] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72e97d69-a9ae-4a44-89eb-7946fe567ce4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.730479] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1142.733721] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abd12c4e-43ec-4860-bb7f-e1547b5055ce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.755811] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1142.755811] env[62383]: value = "task-2452443" [ 1142.755811] env[62383]: _type = "Task" [ 1142.755811] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.757264] env[62383]: DEBUG oslo_vmware.api [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1142.757264] env[62383]: value = "task-2452444" [ 1142.757264] env[62383]: _type = "Task" [ 1142.757264] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.761468] env[62383]: DEBUG nova.scheduler.client.report [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 163 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1142.761728] env[62383]: DEBUG nova.compute.provider_tree [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 163 to 164 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1142.761907] env[62383]: DEBUG nova.compute.provider_tree [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1142.774015] env[62383]: DEBUG oslo_vmware.api [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452444, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.777784] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452443, 'name': CreateVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.913532] env[62383]: DEBUG nova.compute.manager [req-d1c5505c-4696-44e8-aace-f30e38ea75ca req-9607904a-3abd-4c3e-bc83-d66289fade8f service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Received event network-changed-158b7402-2cbc-46b2-a789-ada2ac1b29cd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1142.913532] env[62383]: DEBUG nova.compute.manager [req-d1c5505c-4696-44e8-aace-f30e38ea75ca req-9607904a-3abd-4c3e-bc83-d66289fade8f service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Refreshing instance network info cache due to event network-changed-158b7402-2cbc-46b2-a789-ada2ac1b29cd. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1142.913772] env[62383]: DEBUG oslo_concurrency.lockutils [req-d1c5505c-4696-44e8-aace-f30e38ea75ca req-9607904a-3abd-4c3e-bc83-d66289fade8f service nova] Acquiring lock "refresh_cache-89033750-629f-4ddb-a309-56d50f798a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.913945] env[62383]: DEBUG oslo_concurrency.lockutils [req-d1c5505c-4696-44e8-aace-f30e38ea75ca req-9607904a-3abd-4c3e-bc83-d66289fade8f service nova] Acquired lock "refresh_cache-89033750-629f-4ddb-a309-56d50f798a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.914042] env[62383]: DEBUG nova.network.neutron [req-d1c5505c-4696-44e8-aace-f30e38ea75ca req-9607904a-3abd-4c3e-bc83-d66289fade8f service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Refreshing network info cache for port 158b7402-2cbc-46b2-a789-ada2ac1b29cd {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1143.023364] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452442, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.089133] env[62383]: INFO nova.compute.manager [-] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Took 1.33 seconds to deallocate network for instance. [ 1143.272255] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.107s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.273201] env[62383]: DEBUG nova.compute.manager [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1143.285137] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452443, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.295170] env[62383]: DEBUG oslo_vmware.api [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452444, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.521539] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452442, 'name': MoveVirtualDisk_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.595628] env[62383]: DEBUG oslo_concurrency.lockutils [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.595905] env[62383]: DEBUG oslo_concurrency.lockutils [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.596137] env[62383]: DEBUG nova.objects.instance [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lazy-loading 'resources' on Instance uuid 687912b8-40d2-4243-b31c-06107aa6cfb9 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.767039] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452443, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.771888] env[62383]: DEBUG nova.network.neutron [req-d1c5505c-4696-44e8-aace-f30e38ea75ca req-9607904a-3abd-4c3e-bc83-d66289fade8f service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Updated VIF entry in instance network info cache for port 158b7402-2cbc-46b2-a789-ada2ac1b29cd. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1143.772335] env[62383]: DEBUG nova.network.neutron [req-d1c5505c-4696-44e8-aace-f30e38ea75ca req-9607904a-3abd-4c3e-bc83-d66289fade8f service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Updating instance_info_cache with network_info: [{"id": "158b7402-2cbc-46b2-a789-ada2ac1b29cd", "address": "fa:16:3e:0f:74:4f", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap158b7402-2c", "ovs_interfaceid": "158b7402-2cbc-46b2-a789-ada2ac1b29cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.777449] env[62383]: DEBUG oslo_vmware.api [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452444, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.787474] env[62383]: DEBUG nova.compute.utils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1143.789533] env[62383]: DEBUG nova.compute.manager [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1143.789533] env[62383]: DEBUG nova.network.neutron [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1143.834849] env[62383]: DEBUG nova.policy [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7825487398fc47b5aa690bed357e4448', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba559fb5da01474791c2408ca92bbff6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1144.023143] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452442, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.237889] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4659318-0678-4ed8-bafb-e1a1c02d59a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.246593] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcf1ca6-475b-4be7-a2cb-0abcf6c8a8e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.283227] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deaf89e9-db11-45d4-a748-c51cb757fde1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.285718] env[62383]: DEBUG oslo_concurrency.lockutils [req-d1c5505c-4696-44e8-aace-f30e38ea75ca req-9607904a-3abd-4c3e-bc83-d66289fade8f service nova] Releasing lock "refresh_cache-89033750-629f-4ddb-a309-56d50f798a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.285953] env[62383]: DEBUG nova.compute.manager [req-d1c5505c-4696-44e8-aace-f30e38ea75ca req-9607904a-3abd-4c3e-bc83-d66289fade8f service nova] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Received event network-vif-deleted-1dd1fea9-5c32-479b-879c-f6ca6cdcbc26 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1144.291071] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452443, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.296189] env[62383]: DEBUG nova.compute.manager [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1144.298861] env[62383]: DEBUG oslo_vmware.api [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452444, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.300349] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cb5aa8-0fe9-457e-8ac0-72f9d8f3379c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.316244] env[62383]: DEBUG nova.compute.provider_tree [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.459495] env[62383]: DEBUG nova.network.neutron [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Successfully created port: 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1144.490602] env[62383]: DEBUG oslo_concurrency.lockutils [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "12843fba-0240-44fb-9687-d34a6333011b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.490765] env[62383]: DEBUG oslo_concurrency.lockutils [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "12843fba-0240-44fb-9687-d34a6333011b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.525268] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452442, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.350796} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.525638] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5/OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5.vmdk to [datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47.vmdk. [ 1144.525883] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Cleaning up location [datastore2] OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1144.526143] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_1cfa68e3-5348-4c58-852f-43e060e0a5e5 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1144.526468] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-905ae16b-455c-4b29-b98d-c9471f5ae3d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.533492] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1144.533492] env[62383]: value = "task-2452445" [ 1144.533492] env[62383]: _type = "Task" [ 1144.533492] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.541933] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.772163] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1144.792162] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452443, 'name': CreateVM_Task, 'duration_secs': 1.649949} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.792436] env[62383]: DEBUG oslo_vmware.api [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452444, 'name': ReconfigVM_Task, 'duration_secs': 1.571259} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.792616] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1144.792865] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1144.798939] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e120ede-19f2-4dea-b745-1594c27b9ecc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.817375] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.817552] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.817868] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1144.818646] env[62383]: DEBUG nova.scheduler.client.report [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.821534] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-235658cc-98ee-45aa-8cdd-5f156c2c5cae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.824155] env[62383]: DEBUG oslo_vmware.api [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1144.824155] env[62383]: value = "task-2452446" 
[ 1144.824155] env[62383]: _type = "Task" [ 1144.824155] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.828999] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1144.828999] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52d5523a-863b-10d1-10a4-7be45dfafbba" [ 1144.828999] env[62383]: _type = "Task" [ 1144.828999] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.835554] env[62383]: DEBUG oslo_vmware.api [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452446, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.841913] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52d5523a-863b-10d1-10a4-7be45dfafbba, 'name': SearchDatastore_Task, 'duration_secs': 0.010943} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.842220] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.842456] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1144.842713] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.842892] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.843151] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1144.843524] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e356231-74f5-43b1-b557-824db10c16f2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.850934] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1144.851145] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1144.852094] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8b3836f-d4ea-464c-aab4-8ee8c086d05e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.857487] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1144.857487] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]528a7258-c3b8-fdc4-cfb3-de95a36d04ea" [ 1144.857487] env[62383]: _type = "Task" [ 1144.857487] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.864705] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528a7258-c3b8-fdc4-cfb3-de95a36d04ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.994040] env[62383]: DEBUG nova.compute.manager [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1145.043608] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034049} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.044550] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1145.044550] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.044550] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47.vmdk to [datastore2] 0392d059-57ea-49fb-84d2-b71cbca840db/0392d059-57ea-49fb-84d2-b71cbca840db.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1145.044550] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7dd6780-0fa2-4a4c-bbf6-2915427c0d01 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.051906] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1145.051906] env[62383]: value = "task-2452447" [ 1145.051906] env[62383]: _type = "Task" [ 1145.051906] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.059607] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452447, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.239970] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.314319] env[62383]: DEBUG nova.compute.manager [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1145.325158] env[62383]: DEBUG oslo_concurrency.lockutils [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.729s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.337655] env[62383]: DEBUG oslo_vmware.api [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452446, 'name': ReconfigVM_Task, 'duration_secs': 0.151253} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.339427] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496589', 'volume_id': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7', 'name': 'volume-d01d3eba-40b0-4856-9209-d6e56c81b4a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'ec7c648d-10b0-480a-a5f0-4dab08d0049e', 'attached_at': '', 'detached_at': '', 'volume_id': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7', 'serial': 'd01d3eba-40b0-4856-9209-d6e56c81b4a7'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1145.348148] env[62383]: DEBUG nova.virt.hardware [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1145.348459] env[62383]: DEBUG nova.virt.hardware [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1145.348639] env[62383]: DEBUG nova.virt.hardware [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1145.348830] env[62383]: DEBUG nova.virt.hardware 
[None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1145.348977] env[62383]: DEBUG nova.virt.hardware [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1145.349172] env[62383]: DEBUG nova.virt.hardware [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1145.349392] env[62383]: DEBUG nova.virt.hardware [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1145.349554] env[62383]: DEBUG nova.virt.hardware [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1145.349721] env[62383]: DEBUG nova.virt.hardware [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1145.349926] env[62383]: DEBUG nova.virt.hardware [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1145.350064] env[62383]: DEBUG nova.virt.hardware [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1145.350970] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a90c175-d76a-41ae-b4e5-d4d9b513006a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.355157] env[62383]: INFO nova.scheduler.client.report [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted allocations for instance 687912b8-40d2-4243-b31c-06107aa6cfb9 [ 1145.368537] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50aab2ea-1bc9-47b0-9bbf-6f8497a4c02e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.378461] 
env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528a7258-c3b8-fdc4-cfb3-de95a36d04ea, 'name': SearchDatastore_Task, 'duration_secs': 0.010581} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.387575] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31fc9cf6-dfb3-4f4d-8604-51147ddec25d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.395230] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1145.395230] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b850ae-56e7-1e6c-b83e-7f2c5499d1a2" [ 1145.395230] env[62383]: _type = "Task" [ 1145.395230] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.406820] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b850ae-56e7-1e6c-b83e-7f2c5499d1a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.513830] env[62383]: DEBUG oslo_concurrency.lockutils [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.514130] env[62383]: DEBUG oslo_concurrency.lockutils [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.515710] env[62383]: INFO nova.compute.claims [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1145.563787] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452447, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.743041] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.871289] env[62383]: DEBUG oslo_concurrency.lockutils [None req-320e9e76-eaa0-42d0-a418-bbdcd9e1e6c9 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "687912b8-40d2-4243-b31c-06107aa6cfb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.230s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.890450] env[62383]: DEBUG nova.objects.instance [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'flavor' on Instance uuid ec7c648d-10b0-480a-a5f0-4dab08d0049e {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.907890] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b850ae-56e7-1e6c-b83e-7f2c5499d1a2, 'name': SearchDatastore_Task, 'duration_secs': 0.077247} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.908883] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.909251] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 89033750-629f-4ddb-a309-56d50f798a8d/89033750-629f-4ddb-a309-56d50f798a8d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1145.909561] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a48b317-fdbd-4e7f-a228-e3e3959a2bf5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.919981] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1145.919981] env[62383]: value = "task-2452448" [ 1145.919981] env[62383]: _type = "Task" [ 1145.919981] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.930773] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.066067] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452447, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.094448] env[62383]: DEBUG nova.compute.manager [req-1b77bfb6-aacd-47f8-a6df-e34ed49d7383 req-c3c92dbf-2177-46ba-a974-bb0320f4bc76 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received event network-vif-plugged-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1146.094672] env[62383]: DEBUG oslo_concurrency.lockutils [req-1b77bfb6-aacd-47f8-a6df-e34ed49d7383 req-c3c92dbf-2177-46ba-a974-bb0320f4bc76 service nova] Acquiring lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.094914] env[62383]: DEBUG oslo_concurrency.lockutils [req-1b77bfb6-aacd-47f8-a6df-e34ed49d7383 req-c3c92dbf-2177-46ba-a974-bb0320f4bc76 service nova] Lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.095111] env[62383]: DEBUG oslo_concurrency.lockutils [req-1b77bfb6-aacd-47f8-a6df-e34ed49d7383 req-c3c92dbf-2177-46ba-a974-bb0320f4bc76 service nova] Lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.095285] env[62383]: DEBUG nova.compute.manager [req-1b77bfb6-aacd-47f8-a6df-e34ed49d7383 req-c3c92dbf-2177-46ba-a974-bb0320f4bc76 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] No waiting events found dispatching network-vif-plugged-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1146.095450] env[62383]: WARNING nova.compute.manager [req-1b77bfb6-aacd-47f8-a6df-e34ed49d7383 req-c3c92dbf-2177-46ba-a974-bb0320f4bc76 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received unexpected event network-vif-plugged-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee for instance with vm_state building and task_state spawning. 
[ 1146.170439] env[62383]: DEBUG nova.network.neutron [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Successfully updated port: 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1146.434317] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.568244] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452447, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.659241] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1dd96e-eede-434c-a6c2-5ddb780886db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.670551] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e2397d-1987-42e8-b6ed-9f79c5a4f362 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.674473] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1146.674627] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.674806] env[62383]: DEBUG nova.network.neutron [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1146.709554] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023c39d6-65ed-48ec-ac68-391901dd28d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.720388] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b224eef0-b7b5-43de-a0da-39079261de40 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.738218] env[62383]: DEBUG nova.compute.provider_tree [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 
tempest-ServerActionsTestOtherA-2071253163-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1146.816702] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "b6ed40a8-674f-4179-8642-848ab0a2d31b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.816985] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.898204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-01dff363-147e-452e-b01e-520e6215e35a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.814s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.688810] env[62383]: DEBUG nova.compute.manager [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1147.705722] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452448, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.718575} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.707901] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 89033750-629f-4ddb-a309-56d50f798a8d/89033750-629f-4ddb-a309-56d50f798a8d.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1147.708140] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1147.708412] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452447, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.258396} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.713018] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27c7ff66-2efa-4717-a2ec-993f1408f70e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.713018] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47/cc264ce0-56c7-485b-8b5f-25bd2cbf6a47.vmdk to [datastore2] 0392d059-57ea-49fb-84d2-b71cbca840db/0392d059-57ea-49fb-84d2-b71cbca840db.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1147.713018] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fe1204-9ce8-4e97-acda-3814ed3f74b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.717961] env[62383]: ERROR nova.scheduler.client.report [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [req-54170ba6-7225-4228-b08c-b11fb3b749ef] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-54170ba6-7225-4228-b08c-b11fb3b749ef"}]} [ 1147.742067] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 0392d059-57ea-49fb-84d2-b71cbca840db/0392d059-57ea-49fb-84d2-b71cbca840db.vmdk or device None with type streamOptimized {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1147.742506] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1147.742506] env[62383]: value = "task-2452449" [ 1147.742506] env[62383]: _type = "Task" [ 1147.742506] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.743352] env[62383]: DEBUG nova.network.neutron [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1147.745664] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cd69bd4-5073-4860-a965-2c5b0a51959f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.760557] env[62383]: DEBUG nova.scheduler.client.report [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1147.771575] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452449, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.772849] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1147.772849] env[62383]: value = "task-2452450" [ 1147.772849] env[62383]: _type = "Task" [ 1147.772849] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.781363] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452450, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.782401] env[62383]: DEBUG nova.scheduler.client.report [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1147.782557] env[62383]: DEBUG nova.compute.provider_tree [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1147.793861] env[62383]: DEBUG nova.scheduler.client.report [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1147.814667] env[62383]: DEBUG nova.scheduler.client.report [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1147.925623] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff92697-62fb-4502-b4bc-392313d6b10c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.933593] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef483a7b-e10b-4735-9f49-8b45743a992e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.963576] env[62383]: DEBUG nova.network.neutron [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [{"id": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "address": "fa:16:3e:25:d6:e3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", 
"bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633f7cbf-4b", "ovs_interfaceid": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.965361] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4659a19-bb50-4d3e-8171-4a356320d387 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.972204] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77103ed1-66d8-4727-b872-0e8c92e8c9f5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.985959] env[62383]: DEBUG nova.compute.provider_tree [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.120537] env[62383]: DEBUG nova.compute.manager [req-3657f9e5-27f9-48f3-8cbd-a1705d364255 req-a5452052-4f51-4e6b-a475-9b649c4c89c6 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received event network-changed-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1148.120829] env[62383]: DEBUG nova.compute.manager [req-3657f9e5-27f9-48f3-8cbd-a1705d364255 req-a5452052-4f51-4e6b-a475-9b649c4c89c6 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing instance network info cache due to event network-changed-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1148.121055] env[62383]: DEBUG oslo_concurrency.lockutils [req-3657f9e5-27f9-48f3-8cbd-a1705d364255 req-a5452052-4f51-4e6b-a475-9b649c4c89c6 service nova] Acquiring lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1148.200944] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.201266] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.201490] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.201697] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.201886] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.204816] env[62383]: INFO nova.compute.manager [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Terminating instance [ 1148.216280] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.268685] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 
tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452449, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068639} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.268951] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1148.269720] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c23772-4dc7-4df8-aa0c-ec04a7929a82 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.293308] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 89033750-629f-4ddb-a309-56d50f798a8d/89033750-629f-4ddb-a309-56d50f798a8d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.293930] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92d26f07-8327-4fa2-9252-daad93ddbe3b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.310941] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452450, 'name': ReconfigVM_Task, 'duration_secs': 0.259526} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.311559] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 0392d059-57ea-49fb-84d2-b71cbca840db/0392d059-57ea-49fb-84d2-b71cbca840db.vmdk or device None with type streamOptimized {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1148.312197] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b063a971-d122-4b9b-bfea-f8a505123ce0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.316040] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1148.316040] env[62383]: value = "task-2452451" [ 1148.316040] env[62383]: _type = "Task" [ 1148.316040] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.319665] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1148.319665] env[62383]: value = "task-2452452" [ 1148.319665] env[62383]: _type = "Task" [ 1148.319665] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.325671] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452451, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.330773] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452452, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.469033] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1148.469352] env[62383]: DEBUG nova.compute.manager [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Instance network_info: |[{"id": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "address": "fa:16:3e:25:d6:e3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633f7cbf-4b", "ovs_interfaceid": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1148.469661] env[62383]: DEBUG oslo_concurrency.lockutils [req-3657f9e5-27f9-48f3-8cbd-a1705d364255 req-a5452052-4f51-4e6b-a475-9b649c4c89c6 service nova] Acquired lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.469841] env[62383]: DEBUG nova.network.neutron [req-3657f9e5-27f9-48f3-8cbd-a1705d364255 req-a5452052-4f51-4e6b-a475-9b649c4c89c6 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing network info cache for port 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1148.471246] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:d6:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1148.478790] env[62383]: DEBUG oslo.service.loopingcall [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1148.481694] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1148.482226] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17cd9bd3-842b-4dc1-ac43-1b9a5f4b2a4d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.500414] env[62383]: DEBUG nova.scheduler.client.report [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.508815] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1148.508815] env[62383]: value = "task-2452453" [ 1148.508815] env[62383]: _type = "Task" [ 1148.508815] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.518735] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452453, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.708784] env[62383]: DEBUG nova.compute.manager [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1148.709180] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1148.710201] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e1d3a1-dcd6-4cd3-a1fe-486b6acce9c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.717835] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1148.717991] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e584bfe8-6641-437a-859f-393e08a5e6e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.724420] env[62383]: DEBUG oslo_vmware.api [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1148.724420] env[62383]: value = "task-2452454" [ 1148.724420] env[62383]: _type = "Task" [ 1148.724420] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.731980] env[62383]: DEBUG oslo_vmware.api [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452454, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.738740] env[62383]: DEBUG nova.network.neutron [req-3657f9e5-27f9-48f3-8cbd-a1705d364255 req-a5452052-4f51-4e6b-a475-9b649c4c89c6 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updated VIF entry in instance network info cache for port 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1148.739299] env[62383]: DEBUG nova.network.neutron [req-3657f9e5-27f9-48f3-8cbd-a1705d364255 req-a5452052-4f51-4e6b-a475-9b649c4c89c6 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [{"id": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "address": "fa:16:3e:25:d6:e3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633f7cbf-4b", "ovs_interfaceid": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.828548] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452451, 'name': ReconfigVM_Task, 'duration_secs': 0.408302} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.831438] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 89033750-629f-4ddb-a309-56d50f798a8d/89033750-629f-4ddb-a309-56d50f798a8d.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1148.832077] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452452, 'name': Rename_Task, 'duration_secs': 0.205942} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.832283] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec2d1f62-f13a-459a-ac5b-d7d9faa765d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.833708] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1148.833930] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-feaade55-1838-45ad-b27e-d81162b104af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.840612] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1148.840612] env[62383]: value = "task-2452455" [ 1148.840612] env[62383]: _type = "Task" [ 1148.840612] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.841705] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1148.841705] env[62383]: value = "task-2452456" [ 1148.841705] env[62383]: _type = "Task" [ 1148.841705] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.853283] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452455, 'name': Rename_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.855939] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452456, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.005878] env[62383]: DEBUG oslo_concurrency.lockutils [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.492s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.006318] env[62383]: DEBUG nova.compute.manager [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1149.009071] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.266s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.009254] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.009408] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1149.009697] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.794s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.011091] env[62383]: INFO nova.compute.claims [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1149.014390] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1cb4e0-44e8-4b66-8f9e-03d6459e08b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.029672] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb1f79a-5544-46f5-9f84-e917521afd9b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.033343] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452453, 'name': CreateVM_Task, 'duration_secs': 0.366698} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.033502] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1149.034456] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.034617] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.034931] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1149.035212] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9e41a1d-173a-4302-a82c-7f60cc5a76b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.046321] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93012ebe-3605-4eca-a39c-46db6a988025 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.050347] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1149.050347] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a37fc9-460d-6691-4b80-edfadd00b0e8" [ 1149.050347] env[62383]: _type = "Task" [ 1149.050347] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.056664] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d48859-492b-4a34-8c2b-2646dc789ab2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.064127] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a37fc9-460d-6691-4b80-edfadd00b0e8, 'name': SearchDatastore_Task, 'duration_secs': 0.008698} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.064658] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.064880] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1149.065117] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.065276] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.065447] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1149.065668] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10f8e8e9-2b06-4d38-bdb5-f19a1346b0b4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.090026] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179928MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1149.090186] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1149.095921] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1149.096110] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None 
req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1149.096798] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e22d792d-fe13-46ca-8c15-8316befff6fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.101571] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1149.101571] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52340a6e-a3db-c2fc-cd86-833521806fde" [ 1149.101571] env[62383]: _type = "Task" [ 1149.101571] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.108677] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52340a6e-a3db-c2fc-cd86-833521806fde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.234792] env[62383]: DEBUG oslo_vmware.api [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452454, 'name': PowerOffVM_Task, 'duration_secs': 0.207797} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.235094] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1149.235278] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1149.235522] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75dad349-e565-4e5f-8d97-069bd618ef27 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.242107] env[62383]: DEBUG oslo_concurrency.lockutils [req-3657f9e5-27f9-48f3-8cbd-a1705d364255 req-a5452052-4f51-4e6b-a475-9b649c4c89c6 service nova] Releasing lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.321970] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1149.321970] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1149.322209] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleting the datastore file [datastore2] ec7c648d-10b0-480a-a5f0-4dab08d0049e {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.322402] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94e65dfe-ec21-4dd1-972c-bf8bf4d60f24 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.328656] env[62383]: DEBUG oslo_vmware.api [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1149.328656] env[62383]: value = "task-2452458" [ 1149.328656] env[62383]: _type = "Task" [ 1149.328656] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.336478] env[62383]: DEBUG oslo_vmware.api [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452458, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.353193] env[62383]: DEBUG oslo_vmware.api [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452456, 'name': PowerOnVM_Task, 'duration_secs': 0.442581} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.356110] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1149.358034] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452455, 'name': Rename_Task, 'duration_secs': 0.155011} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.358464] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1149.358698] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-866191b6-1167-4c29-a64c-29493dba3571 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.364699] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1149.364699] env[62383]: value = "task-2452459" [ 1149.364699] env[62383]: _type = "Task" [ 1149.364699] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.373027] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452459, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.459054] env[62383]: DEBUG nova.compute.manager [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1149.459204] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9379f63-556c-4f0b-b783-a47da53dcb32 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.515484] env[62383]: DEBUG nova.compute.utils [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1149.519537] env[62383]: DEBUG nova.compute.manager [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1149.519715] env[62383]: DEBUG nova.network.neutron [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1149.560394] env[62383]: DEBUG nova.policy [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4de9dec9c1d2474eb611f4a2623d602d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aead8ea1d1de4d0d8d8c07dec519d8b4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1149.611122] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52340a6e-a3db-c2fc-cd86-833521806fde, 'name': SearchDatastore_Task, 'duration_secs': 0.007462} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.611888] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2165d9c4-0698-4f99-a8dd-40a684d4728b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.616827] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1149.616827] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5269fb22-481c-8a84-4807-f491a14858b8" [ 1149.616827] env[62383]: _type = "Task" [ 1149.616827] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.625461] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5269fb22-481c-8a84-4807-f491a14858b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.838511] env[62383]: DEBUG oslo_vmware.api [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452458, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232461} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.838757] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1149.838939] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1149.839126] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1149.839298] env[62383]: INFO nova.compute.manager [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1149.839530] env[62383]: DEBUG oslo.service.loopingcall [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1149.839725] env[62383]: DEBUG nova.compute.manager [-] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1149.839806] env[62383]: DEBUG nova.network.neutron [-] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1149.875134] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452459, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.948773] env[62383]: DEBUG nova.network.neutron [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Successfully created port: 6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1149.979966] env[62383]: DEBUG oslo_concurrency.lockutils [None req-6936d857-e44f-4429-89bc-cd4e394e9b97 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "0392d059-57ea-49fb-84d2-b71cbca840db" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 21.842s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.980898] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "0392d059-57ea-49fb-84d2-b71cbca840db" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.314s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.981085] env[62383]: INFO nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] During sync_power_state the instance has a pending task (spawning). Skip. [ 1149.981276] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "0392d059-57ea-49fb-84d2-b71cbca840db" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.020284] env[62383]: DEBUG nova.compute.manager [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1150.130395] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5269fb22-481c-8a84-4807-f491a14858b8, 'name': SearchDatastore_Task, 'duration_secs': 0.011797} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.130897] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1150.131096] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ac4e173d-fec9-4a0f-b9b6-ad83a98989e7/ac4e173d-fec9-4a0f-b9b6-ad83a98989e7.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1150.131192] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9e0dcb6-5fdc-453f-8553-278dac311e3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.140780] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1150.140780] env[62383]: value = "task-2452460" [ 1150.140780] env[62383]: _type = "Task" [ 1150.140780] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.149970] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452460, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.195905] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43417c5f-6a51-492c-83af-ca5b018c4b03 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.204102] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6261a7-30d9-40dd-b228-4b55418dd9c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.235627] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9127ab-cb9b-48b7-9f89-4b84642067cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.244043] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd786398-f1ea-4224-9fc7-1524492aa5c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.260040] env[62383]: DEBUG nova.compute.provider_tree [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.346573] env[62383]: DEBUG nova.compute.manager [req-f5826b2d-a112-4891-b00c-98f73c86cb7c req-0e16ab92-0556-4b2a-911a-c6bfc50afd2d service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Received event network-vif-deleted-275a086a-5096-4414-8397-af9ac5331f87 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1150.346695] env[62383]: INFO nova.compute.manager [req-f5826b2d-a112-4891-b00c-98f73c86cb7c req-0e16ab92-0556-4b2a-911a-c6bfc50afd2d service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Neutron deleted interface 275a086a-5096-4414-8397-af9ac5331f87; detaching it from the instance and deleting it from the info cache [ 1150.346872] env[62383]: DEBUG nova.network.neutron [req-f5826b2d-a112-4891-b00c-98f73c86cb7c req-0e16ab92-0556-4b2a-911a-c6bfc50afd2d service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.379551] env[62383]: DEBUG oslo_vmware.api [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452459, 'name': PowerOnVM_Task, 'duration_secs': 0.943068} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.380915] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1150.380915] env[62383]: INFO nova.compute.manager [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Took 9.68 seconds to spawn the instance on the hypervisor. [ 1150.380915] env[62383]: DEBUG nova.compute.manager [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1150.381201] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686c3303-fce4-4bc3-a630-d157bfc0824c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.532196] env[62383]: INFO nova.virt.block_device [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Booting with volume 24d05613-a175-4448-bd5a-122c9a2e08ac at /dev/sda [ 1150.576790] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73c92e20-2256-480d-851b-cf041c517bc3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.586656] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00de0bf1-7af8-442a-88ac-43df24e55754 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.620506] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ab116b40-5549-41e4-8fa6-f4eb245babfe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.628671] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6200234-b59e-4e95-b157-2e342e172812 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.655303] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452460, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.673688] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c7ada0-c684-478d-bc1f-40cdf293ab24 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.682156] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2176e700-44c6-4b98-b6a6-02af9d773443 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.695320] env[62383]: DEBUG nova.virt.block_device [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating existing volume attachment record: 3f7b0d14-856f-40eb-ac58-e5b00775b75a {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1150.763249] env[62383]: DEBUG nova.scheduler.client.report [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.796705] env[62383]: DEBUG nova.network.neutron [-] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.851444] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1298f7b-3051-4bd4-8825-49cddae359a3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.858684] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66209c1f-3caa-4187-abb5-81cd7ff92593 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.873850] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "0392d059-57ea-49fb-84d2-b71cbca840db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.874317] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "0392d059-57ea-49fb-84d2-b71cbca840db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.876337] env[62383]: DEBUG 
oslo_concurrency.lockutils [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.876337] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.876337] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "0392d059-57ea-49fb-84d2-b71cbca840db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.878327] env[62383]: INFO nova.compute.manager [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Terminating instance [ 1150.903046] env[62383]: DEBUG nova.compute.manager [req-f5826b2d-a112-4891-b00c-98f73c86cb7c req-0e16ab92-0556-4b2a-911a-c6bfc50afd2d service nova] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Detach interface failed, port_id=275a086a-5096-4414-8397-af9ac5331f87, reason: Instance ec7c648d-10b0-480a-a5f0-4dab08d0049e could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1150.903046] env[62383]: DEBUG nova.compute.manager [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1150.903046] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1150.906765] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd374a71-d67e-4e1c-8563-d8fc93fc708a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.911773] env[62383]: INFO nova.compute.manager [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Took 15.56 seconds to build instance. 
[ 1150.918017] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1150.918266] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49e9744c-5c77-417d-be06-ca3c6292ff50 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.925815] env[62383]: DEBUG oslo_vmware.api [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1150.925815] env[62383]: value = "task-2452461" [ 1150.925815] env[62383]: _type = "Task" [ 1150.925815] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.936168] env[62383]: DEBUG oslo_vmware.api [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.151606] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.562368} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.151882] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ac4e173d-fec9-4a0f-b9b6-ad83a98989e7/ac4e173d-fec9-4a0f-b9b6-ad83a98989e7.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1151.152124] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1151.152374] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cba9228e-525f-467f-8f73-6050ed13573b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.157552] env[62383]: DEBUG nova.compute.manager [req-607ee09b-859f-4e1b-9133-8425aa251caa req-f63ebf46-4b69-4d7b-b0bf-2c0306e201e8 service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Received event network-changed-158b7402-2cbc-46b2-a789-ada2ac1b29cd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1151.157749] env[62383]: DEBUG nova.compute.manager [req-607ee09b-859f-4e1b-9133-8425aa251caa req-f63ebf46-4b69-4d7b-b0bf-2c0306e201e8 service 
nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Refreshing instance network info cache due to event network-changed-158b7402-2cbc-46b2-a789-ada2ac1b29cd. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1151.157961] env[62383]: DEBUG oslo_concurrency.lockutils [req-607ee09b-859f-4e1b-9133-8425aa251caa req-f63ebf46-4b69-4d7b-b0bf-2c0306e201e8 service nova] Acquiring lock "refresh_cache-89033750-629f-4ddb-a309-56d50f798a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.158116] env[62383]: DEBUG oslo_concurrency.lockutils [req-607ee09b-859f-4e1b-9133-8425aa251caa req-f63ebf46-4b69-4d7b-b0bf-2c0306e201e8 service nova] Acquired lock "refresh_cache-89033750-629f-4ddb-a309-56d50f798a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.158278] env[62383]: DEBUG nova.network.neutron [req-607ee09b-859f-4e1b-9133-8425aa251caa req-f63ebf46-4b69-4d7b-b0bf-2c0306e201e8 service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Refreshing network info cache for port 158b7402-2cbc-46b2-a789-ada2ac1b29cd {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.164759] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1151.164759] env[62383]: value = "task-2452462" [ 1151.164759] env[62383]: _type = "Task" [ 1151.164759] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.173475] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452462, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.268321] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.269038] env[62383]: DEBUG nova.compute.manager [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1151.271361] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.181s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.299494] env[62383]: INFO nova.compute.manager [-] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Took 1.46 seconds to deallocate network for instance. 
[ 1151.413976] env[62383]: DEBUG oslo_concurrency.lockutils [None req-8af0467e-7c3c-4d53-8003-ded8a5b33777 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.075s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.441529] env[62383]: DEBUG oslo_vmware.api [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452461, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.536279] env[62383]: DEBUG nova.network.neutron [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Successfully updated port: 6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1151.675026] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062934} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.675026] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1151.675543] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a36f77-40bb-48d2-878a-f7078d443ba0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.699136] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] ac4e173d-fec9-4a0f-b9b6-ad83a98989e7/ac4e173d-fec9-4a0f-b9b6-ad83a98989e7.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.701777] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36188996-08d3-4ed1-80a5-4db4fd102342 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.721854] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1151.721854] env[62383]: value = "task-2452463" [ 1151.721854] env[62383]: _type = "Task" [ 1151.721854] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.730368] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452463, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.776975] env[62383]: DEBUG nova.compute.utils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1151.780898] env[62383]: DEBUG nova.compute.manager [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1151.781096] env[62383]: DEBUG nova.network.neutron [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1151.809030] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.826077] env[62383]: DEBUG nova.policy [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc391aae95a8405bab7801175514ac8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c15955328966463fa09401a270d95fe0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1151.931713] env[62383]: DEBUG nova.network.neutron [req-607ee09b-859f-4e1b-9133-8425aa251caa req-f63ebf46-4b69-4d7b-b0bf-2c0306e201e8 service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Updated VIF entry in instance network info cache for port 158b7402-2cbc-46b2-a789-ada2ac1b29cd. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.932096] env[62383]: DEBUG nova.network.neutron [req-607ee09b-859f-4e1b-9133-8425aa251caa req-f63ebf46-4b69-4d7b-b0bf-2c0306e201e8 service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Updating instance_info_cache with network_info: [{"id": "158b7402-2cbc-46b2-a789-ada2ac1b29cd", "address": "fa:16:3e:0f:74:4f", "network": {"id": "eb2c6ec5-1f5f-43b4-b628-5a79ef2b6365", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1973918213-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83304cfb5deb443880252c194e249565", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "01fe2e08-46f6-4cee-aefd-934461f8077d", "external-id": "nsx-vlan-transportzone-806", "segmentation_id": 806, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap158b7402-2c", "ovs_interfaceid": "158b7402-2cbc-46b2-a789-ada2ac1b29cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.938759] env[62383]: DEBUG oslo_vmware.api [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452461, 'name': PowerOffVM_Task, 'duration_secs': 0.572925} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.939054] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1151.939263] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1151.939601] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ba37ae8-6d0c-4ac4-838d-127a28bd72e3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.012101] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1152.012357] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1152.012542] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleting the datastore file [datastore2] 0392d059-57ea-49fb-84d2-b71cbca840db {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1152.012870] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-784bd24a-d042-4c99-af82-641e096acba7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.019437] env[62383]: DEBUG oslo_vmware.api [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for the task: (returnval){ [ 1152.019437] env[62383]: value = "task-2452465" [ 1152.019437] env[62383]: _type = "Task" [ 1152.019437] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.027574] env[62383]: DEBUG oslo_vmware.api [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452465, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.039277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1152.039427] env[62383]: DEBUG oslo_concurrency.lockutils [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.039603] env[62383]: DEBUG nova.network.neutron [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1152.084864] env[62383]: DEBUG nova.network.neutron [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Successfully created port: cbb2fb29-a758-4def-88a8-416db1bb8301 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1152.233264] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452463, 'name': ReconfigVM_Task, 'duration_secs': 0.259488} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.233532] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Reconfigured VM instance instance-00000072 to attach disk [datastore2] ac4e173d-fec9-4a0f-b9b6-ad83a98989e7/ac4e173d-fec9-4a0f-b9b6-ad83a98989e7.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.234209] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09dc5ab2-57db-4504-be33-c0a9b04b1a36 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.240049] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1152.240049] env[62383]: value = "task-2452466" [ 1152.240049] env[62383]: _type = "Task" [ 1152.240049] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.247532] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452466, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.281872] env[62383]: DEBUG nova.compute.manager [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1152.308941] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1b025655-acad-4b70-9e1a-489683cafb7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1152.309118] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance ec7c648d-10b0-480a-a5f0-4dab08d0049e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1152.309246] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance c94e9a83-04de-4144-ab6e-d96dc7c39e6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1152.309367] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance f7584d2c-5add-4764-9aed-22f7d1674854 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1152.309487] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 0392d059-57ea-49fb-84d2-b71cbca840db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1152.309599] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 89033750-629f-4ddb-a309-56d50f798a8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1152.309712] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance ac4e173d-fec9-4a0f-b9b6-ad83a98989e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1152.309822] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 12843fba-0240-44fb-9687-d34a6333011b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1152.309930] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance b6ed40a8-674f-4179-8642-848ab0a2d31b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1152.310153] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1152.310292] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1152.419464] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9fdb75-4c1f-498e-b571-eb34327afdeb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.426923] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905231c4-d1a5-48a1-bb8f-e9da2c79b9c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.455518] env[62383]: DEBUG oslo_concurrency.lockutils [req-607ee09b-859f-4e1b-9133-8425aa251caa req-f63ebf46-4b69-4d7b-b0bf-2c0306e201e8 service nova] Releasing lock "refresh_cache-89033750-629f-4ddb-a309-56d50f798a8d" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.456905] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d939cb86-c8c2-4d30-b4cd-eba665bcda58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.463522] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e0b279-c59c-46d5-bcfc-6094494713a2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.476281] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.528615] env[62383]: DEBUG oslo_vmware.api [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Task: {'id': task-2452465, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.159353} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.529087] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.529087] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.529315] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.529423] env[62383]: INFO nova.compute.manager [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1152.529663] env[62383]: DEBUG oslo.service.loopingcall [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1152.529849] env[62383]: DEBUG nova.compute.manager [-] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1152.530013] env[62383]: DEBUG nova.network.neutron [-] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1152.571563] env[62383]: DEBUG nova.network.neutron [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1152.706754] env[62383]: DEBUG nova.network.neutron [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance_info_cache with network_info: [{"id": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "address": "fa:16:3e:67:44:eb", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a7f4f55-9c", "ovs_interfaceid": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.752127] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452466, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.782198] env[62383]: DEBUG nova.compute.manager [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Got 1 possible topologies 
{{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1152.782198] env[62383]: DEBUG nova.virt.hardware [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1152.784344] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e3d87f-ea98-49bf-b80c-9e72e5bf020b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.796485] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7bd8a76-e60b-4837-8c72-de6070aaacc1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.979152] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.209416] env[62383]: DEBUG oslo_concurrency.lockutils [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.209746] env[62383]: DEBUG nova.compute.manager [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Instance network_info: |[{"id": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "address": "fa:16:3e:67:44:eb", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", 
"segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a7f4f55-9c", "ovs_interfaceid": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1153.211401] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:44:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.219429] env[62383]: DEBUG oslo.service.loopingcall [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1153.220938] env[62383]: DEBUG nova.compute.manager [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Received event network-vif-plugged-6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1153.221165] env[62383]: DEBUG oslo_concurrency.lockutils [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] Acquiring lock "12843fba-0240-44fb-9687-d34a6333011b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.221431] env[62383]: DEBUG oslo_concurrency.lockutils [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] Lock "12843fba-0240-44fb-9687-d34a6333011b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.221565] env[62383]: DEBUG oslo_concurrency.lockutils [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] Lock "12843fba-0240-44fb-9687-d34a6333011b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.221727] env[62383]: DEBUG nova.compute.manager [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] No waiting events found dispatching network-vif-plugged-6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1153.221904] env[62383]: WARNING nova.compute.manager [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 
12843fba-0240-44fb-9687-d34a6333011b] Received unexpected event network-vif-plugged-6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 for instance with vm_state building and task_state spawning. [ 1153.222126] env[62383]: DEBUG nova.compute.manager [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Received event network-changed-6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1153.222290] env[62383]: DEBUG nova.compute.manager [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Refreshing instance network info cache due to event network-changed-6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1153.222471] env[62383]: DEBUG oslo_concurrency.lockutils [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] Acquiring lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.222606] env[62383]: DEBUG oslo_concurrency.lockutils [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] Acquired lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.222822] env[62383]: DEBUG nova.network.neutron [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Refreshing network info cache for port 6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1153.224227] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1153.224644] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be094ca2-1dda-487d-b06b-9126d2ca79fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.245810] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.245810] env[62383]: value = "task-2452467" [ 1153.245810] env[62383]: _type = "Task" [ 1153.245810] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.252238] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452466, 'name': Rename_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.256945] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452467, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.292547] env[62383]: DEBUG nova.compute.manager [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1153.319309] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1153.319595] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1153.319759] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1153.319947] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1153.320116] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1153.320268] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1153.320478] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1153.320775] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1153.321042] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1153.321234] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1153.321418] env[62383]: DEBUG nova.virt.hardware [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1153.322291] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fe6a95-b6f3-4b23-8ee7-689d79b95f58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.330497] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f1a96d-e22b-4d06-9cb8-dc0a5fddd4d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.445662] env[62383]: DEBUG nova.compute.manager [req-a41dc29c-74ec-4f22-b72d-d9afc5431968 req-2b1d1484-0afd-4b88-bf58-970bdbd3964e service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Received event network-vif-plugged-cbb2fb29-a758-4def-88a8-416db1bb8301 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1153.445931] env[62383]: DEBUG oslo_concurrency.lockutils [req-a41dc29c-74ec-4f22-b72d-d9afc5431968 req-2b1d1484-0afd-4b88-bf58-970bdbd3964e service nova] Acquiring lock "b6ed40a8-674f-4179-8642-848ab0a2d31b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.446161] env[62383]: DEBUG oslo_concurrency.lockutils [req-a41dc29c-74ec-4f22-b72d-d9afc5431968 req-2b1d1484-0afd-4b88-bf58-970bdbd3964e service nova] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.446369] env[62383]: DEBUG oslo_concurrency.lockutils [req-a41dc29c-74ec-4f22-b72d-d9afc5431968 req-2b1d1484-0afd-4b88-bf58-970bdbd3964e service nova] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.446515] env[62383]: DEBUG nova.compute.manager [req-a41dc29c-74ec-4f22-b72d-d9afc5431968 
req-2b1d1484-0afd-4b88-bf58-970bdbd3964e service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] No waiting events found dispatching network-vif-plugged-cbb2fb29-a758-4def-88a8-416db1bb8301 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1153.446694] env[62383]: WARNING nova.compute.manager [req-a41dc29c-74ec-4f22-b72d-d9afc5431968 req-2b1d1484-0afd-4b88-bf58-970bdbd3964e service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Received unexpected event network-vif-plugged-cbb2fb29-a758-4def-88a8-416db1bb8301 for instance with vm_state building and task_state spawning. [ 1153.486417] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1153.486726] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.215s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.486927] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.678s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.487231] env[62383]: DEBUG nova.objects.instance [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'resources' on Instance uuid ec7c648d-10b0-480a-a5f0-4dab08d0049e {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.535552] env[62383]: DEBUG nova.network.neutron [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Successfully updated port: cbb2fb29-a758-4def-88a8-416db1bb8301 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1153.585918] env[62383]: DEBUG nova.network.neutron [-] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.753323] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452466, 'name': Rename_Task, 'duration_secs': 1.17691} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.755984] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1153.756264] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-797deb4f-467e-4e46-b5b9-90ab44ed711c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.760737] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452467, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.766092] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1153.766092] env[62383]: value = "task-2452468" [ 1153.766092] env[62383]: _type = "Task" [ 1153.766092] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.774252] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452468, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.928551] env[62383]: DEBUG nova.network.neutron [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updated VIF entry in instance network info cache for port 6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1153.928926] env[62383]: DEBUG nova.network.neutron [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance_info_cache with network_info: [{"id": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "address": "fa:16:3e:67:44:eb", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a7f4f55-9c", "ovs_interfaceid": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.038488] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1154.038694] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.038854] env[62383]: DEBUG nova.network.neutron [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1154.088782] env[62383]: INFO nova.compute.manager [-] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Took 1.56 seconds to deallocate network for instance. 
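The "Acquiring lock ... / acquired ... waited N s / released ... held N s" lines above, for the per-instance "refresh_cache-<uuid>" and "<uuid>-events" locks, come from oslo.concurrency's lockutils. The following is a minimal sketch of that locking pattern using only the public oslo.concurrency API, not the actual Nova code; the instance UUID is reused from the log purely as an illustrative lock name.

    from oslo_concurrency import lockutils

    INSTANCE_UUID = '12843fba-0240-44fb-9687-d34a6333011b'  # UUID taken from the log above

    @lockutils.synchronized('refresh_cache-%s' % INSTANCE_UUID)
    def refresh_network_info_cache():
        # Runs with the per-instance cache lock held; concurrent refreshes of the
        # same instance serialize here, which is what produces the "waited"/"held"
        # timings recorded in the log.
        pass

    # The same primitive is also available as a context manager, e.g. for the
    # per-instance event lock used while popping/dispatching external events:
    with lockutils.lock('%s-events' % INSTANCE_UUID):
        pass  # dispatch pending events while the lock is held

Both forms emit DEBUG acquire/release lines like the ones seen in this log.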
[ 1154.108230] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f37011-59b6-438b-abdc-416f86eb4fe4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.116265] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc4d0fb-2ca4-4c1a-b6ba-81601ff7cc6e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.147929] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03353d1a-8f41-47a5-82ca-fe6d236d1d34 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.155785] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b9c0ac-aaf3-4346-84d3-9661836cd0e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.170441] env[62383]: DEBUG nova.compute.provider_tree [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.256660] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452467, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.275016] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452468, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.431498] env[62383]: DEBUG oslo_concurrency.lockutils [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] Releasing lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1154.431787] env[62383]: DEBUG nova.compute.manager [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Received event network-vif-deleted-e822f89d-516c-4eab-bd54-f1369994f514 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1154.432021] env[62383]: INFO nova.compute.manager [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Neutron deleted interface e822f89d-516c-4eab-bd54-f1369994f514; detaching it from the instance and deleting it from the info cache [ 1154.432240] env[62383]: DEBUG nova.network.neutron [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.484166] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.484396] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.484544] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1154.577191] env[62383]: DEBUG nova.network.neutron [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1154.595951] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.673906] env[62383]: DEBUG nova.scheduler.client.report [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1154.710583] env[62383]: DEBUG nova.network.neutron [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance_info_cache with network_info: [{"id": "cbb2fb29-a758-4def-88a8-416db1bb8301", "address": "fa:16:3e:8a:55:fa", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbb2fb29-a7", "ovs_interfaceid": "cbb2fb29-a758-4def-88a8-416db1bb8301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.758064] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452467, 'name': CreateVM_Task, 'duration_secs': 1.44706} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.758253] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1154.758921] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': '3f7b0d14-856f-40eb-ac58-e5b00775b75a', 'device_type': None, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496603', 'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac', 'name': 'volume-24d05613-a175-4448-bd5a-122c9a2e08ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '12843fba-0240-44fb-9687-d34a6333011b', 'attached_at': '', 'detached_at': '', 'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac', 'serial': '24d05613-a175-4448-bd5a-122c9a2e08ac'}, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62383) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1154.759150] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Root volume attach. Driver type: vmdk {{(pid=62383) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1154.759888] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612bfb5d-778e-4cb8-8426-48230d29bf3a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.767415] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0b399e-44b0-478a-9de3-75419905d48d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.778806] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ca73e6-e448-4628-9f6e-15a9ea153712 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.781268] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452468, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.784921] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-17f04b43-c310-4097-8482-a2a12669085f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.791467] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1154.791467] env[62383]: value = "task-2452469" [ 1154.791467] env[62383]: _type = "Task" [ 1154.791467] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.798757] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452469, 'name': RelocateVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.935481] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-837a186f-1279-48ee-9134-c6df9d57719a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.944924] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30832ede-3d79-4e90-9b5f-7dc02bf2e56a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.975731] env[62383]: DEBUG nova.compute.manager [req-39d2fd57-7150-449c-94e0-e2c8086f5e41 req-63352b8a-8a1e-4b2f-9d39-634ed4202691 service nova] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Detach interface failed, port_id=e822f89d-516c-4eab-bd54-f1369994f514, reason: Instance 0392d059-57ea-49fb-84d2-b71cbca840db could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1155.180930] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.694s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.183331] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.588s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.183568] env[62383]: DEBUG nova.objects.instance [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lazy-loading 'resources' on Instance uuid 0392d059-57ea-49fb-84d2-b71cbca840db {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1155.201600] env[62383]: INFO nova.scheduler.client.report [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleted allocations for instance ec7c648d-10b0-480a-a5f0-4dab08d0049e [ 1155.213377] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.213685] env[62383]: DEBUG nova.compute.manager [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Instance network_info: |[{"id": "cbb2fb29-a758-4def-88a8-416db1bb8301", "address": "fa:16:3e:8a:55:fa", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbb2fb29-a7", "ovs_interfaceid": "cbb2fb29-a758-4def-88a8-416db1bb8301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1155.214127] env[62383]: DEBUG 
nova.virt.vmwareapi.vmops [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:55:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cbb2fb29-a758-4def-88a8-416db1bb8301', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1155.221664] env[62383]: DEBUG oslo.service.loopingcall [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1155.221879] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1155.222110] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dcbedd26-8597-472f-9a20-3edda288f46e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.242984] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1155.242984] env[62383]: value = "task-2452470" [ 1155.242984] env[62383]: _type = "Task" [ 1155.242984] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.254582] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452470, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.279744] env[62383]: DEBUG oslo_vmware.api [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452468, 'name': PowerOnVM_Task, 'duration_secs': 1.304316} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.280048] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1155.280225] env[62383]: INFO nova.compute.manager [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Took 9.97 seconds to spawn the instance on the hypervisor. 
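Each of the CreateVM_Task, RelocateVM_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above follows the same pattern: a vSphere API method is invoked, a Task reference is returned immediately, and oslo.vmware's wait_for_task polls it, producing the "progress is N%" and "completed successfully" lines. Below is a minimal sketch of driving one such task through the public oslo.vmware API; the host, credentials and VM reference are placeholders, not values from this deployment, and the poll interval is chosen to match the roughly half-second cadence visible between poll lines above.

    from oslo_vmware import api

    # Placeholder vCenter endpoint and credentials.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = None  # placeholder: a VirtualMachine managed-object reference from a prior lookup

    # Invoking the method returns a Task reference immediately; vCenter carries on
    # asynchronously while wait_for_task() polls the task until SUCCESS, raising on error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)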
[ 1155.280407] env[62383]: DEBUG nova.compute.manager [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1155.281317] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02bd9ad-b099-4b7a-97b1-f29aa40608ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.300856] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452469, 'name': RelocateVM_Task, 'duration_secs': 0.376234} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.300856] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Volume attach. Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1155.300856] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496603', 'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac', 'name': 'volume-24d05613-a175-4448-bd5a-122c9a2e08ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '12843fba-0240-44fb-9687-d34a6333011b', 'attached_at': '', 'detached_at': '', 'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac', 'serial': '24d05613-a175-4448-bd5a-122c9a2e08ac'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1155.301552] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8acfc2-c064-432d-bd00-4c58a5127d3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.318409] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa512ee-2786-44f6-8ad2-c74dd9226e9b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.343078] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-24d05613-a175-4448-bd5a-122c9a2e08ac/volume-24d05613-a175-4448-bd5a-122c9a2e08ac.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1155.343637] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e68f6eb0-ce64-42d5-a132-faeace6cc522 {{(pid=62383) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.364191] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1155.364191] env[62383]: value = "task-2452471" [ 1155.364191] env[62383]: _type = "Task" [ 1155.364191] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.373241] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452471, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.474514] env[62383]: DEBUG nova.compute.manager [req-9ecb164b-ebf9-42c7-bc10-eb05d430f78b req-632b8864-2b40-4fd5-9393-cb16802c120d service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Received event network-changed-cbb2fb29-a758-4def-88a8-416db1bb8301 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1155.474514] env[62383]: DEBUG nova.compute.manager [req-9ecb164b-ebf9-42c7-bc10-eb05d430f78b req-632b8864-2b40-4fd5-9393-cb16802c120d service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Refreshing instance network info cache due to event network-changed-cbb2fb29-a758-4def-88a8-416db1bb8301. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1155.474514] env[62383]: DEBUG oslo_concurrency.lockutils [req-9ecb164b-ebf9-42c7-bc10-eb05d430f78b req-632b8864-2b40-4fd5-9393-cb16802c120d service nova] Acquiring lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.474623] env[62383]: DEBUG oslo_concurrency.lockutils [req-9ecb164b-ebf9-42c7-bc10-eb05d430f78b req-632b8864-2b40-4fd5-9393-cb16802c120d service nova] Acquired lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.474748] env[62383]: DEBUG nova.network.neutron [req-9ecb164b-ebf9-42c7-bc10-eb05d430f78b req-632b8864-2b40-4fd5-9393-cb16802c120d service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Refreshing network info cache for port cbb2fb29-a758-4def-88a8-416db1bb8301 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1155.708213] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0ebced3b-1178-4491-84db-8be38f83f1bb tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "ec7c648d-10b0-480a-a5f0-4dab08d0049e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.507s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.752969] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452470, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.796098] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd9e099-5a9f-4830-b40a-d1d43b96a9a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.800994] env[62383]: INFO nova.compute.manager [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Took 15.66 seconds to build instance. [ 1155.805302] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8da7e0-ca0b-40f6-8cd2-fb46cf8345af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.835230] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1973becf-736d-440a-a0f5-319caf24e2df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.843631] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60699ca1-8900-4af9-908f-d093156525e3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.857122] env[62383]: DEBUG nova.compute.provider_tree [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.872878] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452471, 'name': ReconfigVM_Task, 'duration_secs': 0.259369} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.873161] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-24d05613-a175-4448-bd5a-122c9a2e08ac/volume-24d05613-a175-4448-bd5a-122c9a2e08ac.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1155.878018] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1aa1bb22-a219-4b8b-8825-60a14236c239 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.892696] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1155.892696] env[62383]: value = "task-2452472" [ 1155.892696] env[62383]: _type = "Task" [ 1155.892696] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.903579] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452472, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.181893] env[62383]: DEBUG nova.network.neutron [req-9ecb164b-ebf9-42c7-bc10-eb05d430f78b req-632b8864-2b40-4fd5-9393-cb16802c120d service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updated VIF entry in instance network info cache for port cbb2fb29-a758-4def-88a8-416db1bb8301. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1156.182354] env[62383]: DEBUG nova.network.neutron [req-9ecb164b-ebf9-42c7-bc10-eb05d430f78b req-632b8864-2b40-4fd5-9393-cb16802c120d service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance_info_cache with network_info: [{"id": "cbb2fb29-a758-4def-88a8-416db1bb8301", "address": "fa:16:3e:8a:55:fa", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbb2fb29-a7", "ovs_interfaceid": "cbb2fb29-a758-4def-88a8-416db1bb8301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.215856] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.216078] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquired lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.216573] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Forcefully refreshing network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1156.255201] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452470, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.303388] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dfcd17bf-f1b4-477a-935c-6ce0f39b0152 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.169s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.360014] env[62383]: DEBUG nova.scheduler.client.report [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1156.402119] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452472, 'name': ReconfigVM_Task, 'duration_secs': 0.30091} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.402435] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496603', 'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac', 'name': 'volume-24d05613-a175-4448-bd5a-122c9a2e08ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '12843fba-0240-44fb-9687-d34a6333011b', 'attached_at': '', 'detached_at': '', 'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac', 'serial': '24d05613-a175-4448-bd5a-122c9a2e08ac'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1156.402980] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a40c426b-4dd7-4a23-8f61-cae8f9e87755 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.409748] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1156.409748] env[62383]: value = "task-2452473" [ 1156.409748] env[62383]: _type = "Task" [ 1156.409748] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.417608] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452473, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.684911] env[62383]: DEBUG oslo_concurrency.lockutils [req-9ecb164b-ebf9-42c7-bc10-eb05d430f78b req-632b8864-2b40-4fd5-9393-cb16802c120d service nova] Releasing lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1156.741399] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1156.757679] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452470, 'name': CreateVM_Task, 'duration_secs': 1.306972} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.757850] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1156.758518] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.758685] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.759016] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1156.759268] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e49812c-33fb-428b-853f-bb979186fe07 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.764766] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1156.764766] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5211fc34-7dfd-402a-6139-51a2419457bf" [ 1156.764766] env[62383]: _type = "Task" [ 1156.764766] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.773375] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5211fc34-7dfd-402a-6139-51a2419457bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.867140] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.901699] env[62383]: INFO nova.scheduler.client.report [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Deleted allocations for instance 0392d059-57ea-49fb-84d2-b71cbca840db [ 1156.921755] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452473, 'name': Rename_Task, 'duration_secs': 0.17972} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.922319] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1156.922603] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-711493a5-b0de-4a9d-a8fa-f0ab56290acd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.930655] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1156.930655] env[62383]: value = "task-2452474" [ 1156.930655] env[62383]: _type = "Task" [ 1156.930655] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.940035] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452474, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.031769] env[62383]: DEBUG nova.compute.manager [req-b19e624a-86fd-4f8d-9383-7d952ad74731 req-9f9db372-a2d9-4ada-9c05-d4bdea578862 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received event network-changed-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1157.031921] env[62383]: DEBUG nova.compute.manager [req-b19e624a-86fd-4f8d-9383-7d952ad74731 req-9f9db372-a2d9-4ada-9c05-d4bdea578862 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing instance network info cache due to event network-changed-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1157.032229] env[62383]: DEBUG oslo_concurrency.lockutils [req-b19e624a-86fd-4f8d-9383-7d952ad74731 req-9f9db372-a2d9-4ada-9c05-d4bdea578862 service nova] Acquiring lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1157.032422] env[62383]: DEBUG oslo_concurrency.lockutils [req-b19e624a-86fd-4f8d-9383-7d952ad74731 req-9f9db372-a2d9-4ada-9c05-d4bdea578862 service nova] Acquired lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.033108] env[62383]: DEBUG nova.network.neutron [req-b19e624a-86fd-4f8d-9383-7d952ad74731 req-9f9db372-a2d9-4ada-9c05-d4bdea578862 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing network info cache for port 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1157.276279] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5211fc34-7dfd-402a-6139-51a2419457bf, 'name': SearchDatastore_Task, 'duration_secs': 0.011344} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.276623] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.276965] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1157.277092] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1157.277244] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.277425] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1157.277687] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e06a49a5-9dd6-43a0-8877-bea7ecfc8b9d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.285619] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1157.285793] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1157.286503] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b3137a6-aced-4126-bccc-004f8fda8d72 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.291215] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1157.291215] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b5b066-e348-859b-d6d9-5456db668ac6" [ 1157.291215] env[62383]: _type = "Task" [ 1157.291215] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.300316] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b5b066-e348-859b-d6d9-5456db668ac6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.397501] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.411402] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c6941f45-06e2-4504-9d60-2143deec54a6 tempest-ServerActionsTestOtherB-255557430 tempest-ServerActionsTestOtherB-255557430-project-member] Lock "0392d059-57ea-49fb-84d2-b71cbca840db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.537s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1157.441496] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452474, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.801064] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b5b066-e348-859b-d6d9-5456db668ac6, 'name': SearchDatastore_Task, 'duration_secs': 0.009849} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.801797] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83c71f49-0001-4e51-8d98-85ce56749069 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.806889] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1157.806889] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524fb5e3-f07a-5466-0712-b03f639a6a85" [ 1157.806889] env[62383]: _type = "Task" [ 1157.806889] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.814286] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524fb5e3-f07a-5466-0712-b03f639a6a85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.849370] env[62383]: DEBUG nova.network.neutron [req-b19e624a-86fd-4f8d-9383-7d952ad74731 req-9f9db372-a2d9-4ada-9c05-d4bdea578862 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updated VIF entry in instance network info cache for port 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1157.849781] env[62383]: DEBUG nova.network.neutron [req-b19e624a-86fd-4f8d-9383-7d952ad74731 req-9f9db372-a2d9-4ada-9c05-d4bdea578862 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [{"id": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "address": "fa:16:3e:25:d6:e3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633f7cbf-4b", "ovs_interfaceid": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.902044] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Releasing lock "refresh_cache-0392d059-57ea-49fb-84d2-b71cbca840db" {{(pid=62383) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.902317] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Updated the network info_cache for instance {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1157.902580] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.902756] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.902982] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.903169] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.903299] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.903432] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1157.940991] env[62383]: DEBUG oslo_vmware.api [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452474, 'name': PowerOnVM_Task, 'duration_secs': 0.783257} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.941253] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1157.941451] env[62383]: INFO nova.compute.manager [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Took 5.16 seconds to spawn the instance on the hypervisor. 
[ 1157.941628] env[62383]: DEBUG nova.compute.manager [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1157.942412] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2864d40-f5ef-40a2-8976-c89c4a399d9c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.076015] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.076263] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.079952] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "b8e512cd-5eb9-423c-9447-833e34909bc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.080174] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.321328] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524fb5e3-f07a-5466-0712-b03f639a6a85, 'name': SearchDatastore_Task, 'duration_secs': 0.01349} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.321540] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1158.321808] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] b6ed40a8-674f-4179-8642-848ab0a2d31b/b6ed40a8-674f-4179-8642-848ab0a2d31b.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1158.322090] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-311ce1d4-6a45-4219-9ae1-3e5f792f6a90 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.328884] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1158.328884] env[62383]: value = "task-2452476" [ 1158.328884] env[62383]: _type = "Task" [ 1158.328884] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.337316] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452476, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.352709] env[62383]: DEBUG oslo_concurrency.lockutils [req-b19e624a-86fd-4f8d-9383-7d952ad74731 req-9f9db372-a2d9-4ada-9c05-d4bdea578862 service nova] Releasing lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1158.459420] env[62383]: INFO nova.compute.manager [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Took 12.96 seconds to build instance. [ 1158.581104] env[62383]: DEBUG nova.compute.manager [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1158.583734] env[62383]: DEBUG nova.compute.manager [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1158.839789] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452476, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.961444] env[62383]: DEBUG oslo_concurrency.lockutils [None req-028eeea6-6504-4d2b-b1bd-78ff62390603 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "12843fba-0240-44fb-9687-d34a6333011b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.470s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.110060] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.110379] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.112195] env[62383]: INFO nova.compute.claims [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1159.116729] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1159.339560] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452476, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.850191} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.339848] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] b6ed40a8-674f-4179-8642-848ab0a2d31b/b6ed40a8-674f-4179-8642-848ab0a2d31b.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1159.340033] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1159.340283] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bdb2c60c-7355-4cbe-b954-bf603945566c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.347325] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1159.347325] env[62383]: value = "task-2452477" [ 1159.347325] env[62383]: _type = "Task" [ 1159.347325] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.355140] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452477, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.529494] env[62383]: DEBUG nova.compute.manager [req-1e701925-354c-496c-a19b-d92876a2ca9f req-88b66060-8905-43fa-a438-f80c7a7624c4 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Received event network-changed-845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1159.529751] env[62383]: DEBUG nova.compute.manager [req-1e701925-354c-496c-a19b-d92876a2ca9f req-88b66060-8905-43fa-a438-f80c7a7624c4 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Refreshing instance network info cache due to event network-changed-845110d3-620c-4852-8aab-e6907d5b3af2. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1159.529959] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e701925-354c-496c-a19b-d92876a2ca9f req-88b66060-8905-43fa-a438-f80c7a7624c4 service nova] Acquiring lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1159.530280] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e701925-354c-496c-a19b-d92876a2ca9f req-88b66060-8905-43fa-a438-f80c7a7624c4 service nova] Acquired lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.530477] env[62383]: DEBUG nova.network.neutron [req-1e701925-354c-496c-a19b-d92876a2ca9f req-88b66060-8905-43fa-a438-f80c7a7624c4 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Refreshing network info cache for port 845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1159.857835] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075297} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.858196] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1159.859051] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23085899-6071-4211-a8a3-93573dfab98b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.883256] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] b6ed40a8-674f-4179-8642-848ab0a2d31b/b6ed40a8-674f-4179-8642-848ab0a2d31b.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1159.883527] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6389572-2ef4-482b-a803-0395af0bab32 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.904018] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1159.904018] env[62383]: value = "task-2452478" [ 1159.904018] env[62383]: _type = "Task" [ 1159.904018] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.912668] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452478, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.230376] env[62383]: DEBUG nova.network.neutron [req-1e701925-354c-496c-a19b-d92876a2ca9f req-88b66060-8905-43fa-a438-f80c7a7624c4 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Updated VIF entry in instance network info cache for port 845110d3-620c-4852-8aab-e6907d5b3af2. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1160.230798] env[62383]: DEBUG nova.network.neutron [req-1e701925-354c-496c-a19b-d92876a2ca9f req-88b66060-8905-43fa-a438-f80c7a7624c4 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Updating instance_info_cache with network_info: [{"id": "845110d3-620c-4852-8aab-e6907d5b3af2", "address": "fa:16:3e:0d:f6:13", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap845110d3-62", "ovs_interfaceid": "845110d3-620c-4852-8aab-e6907d5b3af2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.250414] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b869a9-bb23-41c9-a43e-abfd68482112 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.259840] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09936fe6-e93a-47f7-a407-96e853dab14b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.290332] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0b55fe-c83e-4e89-8e07-297a84821a13 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.298225] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53b277b-94da-4238-9978-604e2a500567 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.312502] env[62383]: DEBUG nova.compute.provider_tree [None 
req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.414323] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452478, 'name': ReconfigVM_Task, 'duration_secs': 0.262238} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.415706] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Reconfigured VM instance instance-00000074 to attach disk [datastore2] b6ed40a8-674f-4179-8642-848ab0a2d31b/b6ed40a8-674f-4179-8642-848ab0a2d31b.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1160.419517] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4afedec-2166-4095-9b53-54632d737d1a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.426635] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1160.426635] env[62383]: value = "task-2452479" [ 1160.426635] env[62383]: _type = "Task" [ 1160.426635] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.435834] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452479, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.735731] env[62383]: DEBUG oslo_concurrency.lockutils [req-1e701925-354c-496c-a19b-d92876a2ca9f req-88b66060-8905-43fa-a438-f80c7a7624c4 service nova] Releasing lock "refresh_cache-1b025655-acad-4b70-9e1a-489683cafb7e" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.816257] env[62383]: DEBUG nova.scheduler.client.report [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.931433] env[62383]: DEBUG nova.compute.manager [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Stashing vm_state: active {{(pid=62383) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1160.941764] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452479, 'name': Rename_Task, 'duration_secs': 0.139169} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.942041] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1160.945018] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-022dcc95-46eb-46eb-978b-ac43dbb8d24f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.949136] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1160.949136] env[62383]: value = "task-2452480" [ 1160.949136] env[62383]: _type = "Task" [ 1160.949136] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.956884] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452480, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.321502] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.211s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1161.322047] env[62383]: DEBUG nova.compute.manager [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1161.325189] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.209s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1161.326610] env[62383]: INFO nova.compute.claims [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1161.457112] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1161.460340] env[62383]: DEBUG oslo_vmware.api [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452480, 'name': PowerOnVM_Task, 'duration_secs': 0.459301} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.460618] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1161.460865] env[62383]: INFO nova.compute.manager [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Took 8.17 seconds to spawn the instance on the hypervisor. 
[ 1161.461074] env[62383]: DEBUG nova.compute.manager [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1161.461849] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbec87c-1531-4d40-a1ea-4d42beb11163 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.566256] env[62383]: DEBUG nova.compute.manager [req-42381db7-8388-460a-90bd-3ad8a6ecee1d req-7fd28198-16af-4385-8b49-5bf3ec53fe0d service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Received event network-changed-6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1161.566462] env[62383]: DEBUG nova.compute.manager [req-42381db7-8388-460a-90bd-3ad8a6ecee1d req-7fd28198-16af-4385-8b49-5bf3ec53fe0d service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Refreshing instance network info cache due to event network-changed-6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1161.566715] env[62383]: DEBUG oslo_concurrency.lockutils [req-42381db7-8388-460a-90bd-3ad8a6ecee1d req-7fd28198-16af-4385-8b49-5bf3ec53fe0d service nova] Acquiring lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1161.566848] env[62383]: DEBUG oslo_concurrency.lockutils [req-42381db7-8388-460a-90bd-3ad8a6ecee1d req-7fd28198-16af-4385-8b49-5bf3ec53fe0d service nova] Acquired lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.567049] env[62383]: DEBUG nova.network.neutron [req-42381db7-8388-460a-90bd-3ad8a6ecee1d req-7fd28198-16af-4385-8b49-5bf3ec53fe0d service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Refreshing network info cache for port 6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1161.831450] env[62383]: DEBUG nova.compute.utils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1161.835512] env[62383]: DEBUG nova.compute.manager [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1161.835640] env[62383]: DEBUG nova.network.neutron [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1161.907543] env[62383]: DEBUG nova.policy [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7825487398fc47b5aa690bed357e4448', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba559fb5da01474791c2408ca92bbff6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1161.979704] env[62383]: INFO nova.compute.manager [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Took 13.78 seconds to build instance. [ 1162.278406] env[62383]: DEBUG nova.network.neutron [req-42381db7-8388-460a-90bd-3ad8a6ecee1d req-7fd28198-16af-4385-8b49-5bf3ec53fe0d service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updated VIF entry in instance network info cache for port 6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1162.278782] env[62383]: DEBUG nova.network.neutron [req-42381db7-8388-460a-90bd-3ad8a6ecee1d req-7fd28198-16af-4385-8b49-5bf3ec53fe0d service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance_info_cache with network_info: [{"id": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "address": "fa:16:3e:67:44:eb", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a7f4f55-9c", "ovs_interfaceid": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.336769] env[62383]: DEBUG nova.compute.manager [None 
req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1162.471292] env[62383]: DEBUG nova.network.neutron [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Successfully created port: 7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1162.482742] env[62383]: DEBUG oslo_concurrency.lockutils [None req-eff8d904-a4d9-41ab-aa66-e7e0ec540e15 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.664s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.546703] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f41a22-7780-4d44-935e-b31ee3472460 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.555803] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa919d6-c39d-422c-8701-0f17e02939d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.594129] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd713802-2a77-4d3d-b86c-65ea8718e55f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.601749] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf19483-b889-4233-9940-3628b73fbe7d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.615647] env[62383]: DEBUG nova.compute.provider_tree [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1162.781067] env[62383]: DEBUG oslo_concurrency.lockutils [req-42381db7-8388-460a-90bd-3ad8a6ecee1d req-7fd28198-16af-4385-8b49-5bf3ec53fe0d service nova] Releasing lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1163.118829] env[62383]: DEBUG nova.scheduler.client.report [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.350777] env[62383]: DEBUG nova.compute.manager [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1163.377196] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1163.377483] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1163.377650] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1163.377837] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1163.377986] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1163.378155] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1163.378354] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1163.378509] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1163.378675] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1163.378837] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1163.379017] env[62383]: DEBUG nova.virt.hardware [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1163.380228] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1d9079-0e06-4a8f-9c58-26c43795a745 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.388162] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75003103-644c-4502-a9de-96f7b08e13d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.623518] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.298s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.623990] env[62383]: DEBUG nova.compute.manager [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1163.627587] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.171s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.735947] env[62383]: DEBUG nova.compute.manager [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Stashing vm_state: active {{(pid=62383) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1164.131160] env[62383]: DEBUG nova.compute.utils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1164.134574] env[62383]: INFO nova.compute.claims [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1164.139409] env[62383]: DEBUG nova.compute.manager [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1164.139618] env[62383]: DEBUG nova.network.neutron [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1164.178278] env[62383]: DEBUG nova.policy [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36584e4a6b9542918e45e11370c6cfbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b395bdf2df794b32a117f93fa4887c8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1164.265190] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.400940] env[62383]: DEBUG nova.compute.manager [req-f5378ec3-4e2c-45cf-a3e1-f5f718c39346 req-349b4433-e991-4a75-8e54-d34c5212c3c3 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Received event network-vif-plugged-7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1164.401313] env[62383]: DEBUG oslo_concurrency.lockutils [req-f5378ec3-4e2c-45cf-a3e1-f5f718c39346 req-349b4433-e991-4a75-8e54-d34c5212c3c3 service nova] Acquiring lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.401644] env[62383]: DEBUG oslo_concurrency.lockutils [req-f5378ec3-4e2c-45cf-a3e1-f5f718c39346 req-349b4433-e991-4a75-8e54-d34c5212c3c3 service nova] Lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1164.401912] env[62383]: DEBUG oslo_concurrency.lockutils [req-f5378ec3-4e2c-45cf-a3e1-f5f718c39346 req-349b4433-e991-4a75-8e54-d34c5212c3c3 service nova] Lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.402110] env[62383]: DEBUG nova.compute.manager [req-f5378ec3-4e2c-45cf-a3e1-f5f718c39346 req-349b4433-e991-4a75-8e54-d34c5212c3c3 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] No waiting events found dispatching network-vif-plugged-7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} 
[ 1164.402283] env[62383]: WARNING nova.compute.manager [req-f5378ec3-4e2c-45cf-a3e1-f5f718c39346 req-349b4433-e991-4a75-8e54-d34c5212c3c3 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Received unexpected event network-vif-plugged-7f9cebec-50e0-428c-a19c-f39af2719a65 for instance with vm_state building and task_state spawning. [ 1164.465429] env[62383]: DEBUG nova.network.neutron [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Successfully created port: f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1164.617624] env[62383]: DEBUG nova.network.neutron [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Successfully updated port: 7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1164.639965] env[62383]: INFO nova.compute.resource_tracker [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating resource usage from migration a68e057a-f5d0-4a72-90a1-add3b56e984a [ 1164.643150] env[62383]: DEBUG nova.compute.manager [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1164.790953] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32923ab7-878f-4a2b-8b71-80af62fc51f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.798792] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc60136-241f-435c-8b3f-017ac4b1ef54 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.830626] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b06e442-818d-476d-bfc1-1e16ab1cc7eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.838675] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bb2463-08c9-4260-9870-5ce3f8bdf4aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.852325] env[62383]: DEBUG nova.compute.provider_tree [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.121108] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1165.121308] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.121470] env[62383]: DEBUG nova.network.neutron [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1165.356094] env[62383]: DEBUG nova.scheduler.client.report [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.651899] env[62383]: DEBUG nova.compute.manager [None req-302f00dc-fe27-463e-95fe-25424af79857 
tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1165.655299] env[62383]: DEBUG nova.network.neutron [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1165.683047] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1165.683047] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1165.683047] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1165.683047] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1165.683047] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1165.683639] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1165.684434] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 
tempest-AttachVolumeShelveTestJSON-203390778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1165.685097] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1165.685449] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1165.685746] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1165.686055] env[62383]: DEBUG nova.virt.hardware [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1165.687227] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2022963e-c3e4-448a-ab10-83dde966843d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.696791] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77c6148-2251-4640-96b2-5973f1876df9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.861585] env[62383]: DEBUG nova.network.neutron [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updating instance_info_cache with network_info: [{"id": "7f9cebec-50e0-428c-a19c-f39af2719a65", "address": "fa:16:3e:57:77:97", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9cebec-50", "ovs_interfaceid": "7f9cebec-50e0-428c-a19c-f39af2719a65", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.864244] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.236s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.864512] env[62383]: INFO nova.compute.manager [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Migrating [ 1165.886184] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.621s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.935119] env[62383]: DEBUG nova.compute.manager [req-3cb1d3fa-1495-4f58-9f3f-0fdcc7f10dd6 req-9adc2e88-444a-44f2-ac3e-3d2d07f052ce service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received event network-vif-plugged-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1165.935393] env[62383]: DEBUG oslo_concurrency.lockutils [req-3cb1d3fa-1495-4f58-9f3f-0fdcc7f10dd6 req-9adc2e88-444a-44f2-ac3e-3d2d07f052ce service nova] Acquiring lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.935714] env[62383]: DEBUG oslo_concurrency.lockutils [req-3cb1d3fa-1495-4f58-9f3f-0fdcc7f10dd6 req-9adc2e88-444a-44f2-ac3e-3d2d07f052ce service nova] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.935771] env[62383]: DEBUG oslo_concurrency.lockutils [req-3cb1d3fa-1495-4f58-9f3f-0fdcc7f10dd6 req-9adc2e88-444a-44f2-ac3e-3d2d07f052ce service nova] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.935931] env[62383]: DEBUG nova.compute.manager [req-3cb1d3fa-1495-4f58-9f3f-0fdcc7f10dd6 req-9adc2e88-444a-44f2-ac3e-3d2d07f052ce service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] No waiting events found dispatching network-vif-plugged-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1165.936388] env[62383]: WARNING nova.compute.manager [req-3cb1d3fa-1495-4f58-9f3f-0fdcc7f10dd6 req-9adc2e88-444a-44f2-ac3e-3d2d07f052ce service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received unexpected event 
network-vif-plugged-f1d1962c-c0f2-4e5b-9885-b71019f7e792 for instance with vm_state building and task_state spawning. [ 1166.095384] env[62383]: DEBUG nova.network.neutron [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Successfully updated port: f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1166.389245] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1166.389245] env[62383]: DEBUG nova.compute.manager [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Instance network_info: |[{"id": "7f9cebec-50e0-428c-a19c-f39af2719a65", "address": "fa:16:3e:57:77:97", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9cebec-50", "ovs_interfaceid": "7f9cebec-50e0-428c-a19c-f39af2719a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1166.389245] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:77:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f9cebec-50e0-428c-a19c-f39af2719a65', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1166.398587] env[62383]: DEBUG oslo.service.loopingcall [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1166.400913] env[62383]: INFO nova.compute.claims [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1166.405374] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1166.405611] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.405838] env[62383]: DEBUG nova.network.neutron [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1166.410018] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1166.410018] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1132541d-fff2-4053-8ec3-e59e0c6f5347 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.435387] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1166.435387] env[62383]: value = "task-2452481" [ 1166.435387] env[62383]: _type = "Task" [ 1166.435387] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.442656] env[62383]: DEBUG nova.compute.manager [req-872754d3-2c4b-49c9-b888-b2bc492d234f req-28b6c94f-ec6a-47bc-95a4-cb1d0ffb4587 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Received event network-changed-7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1166.443398] env[62383]: DEBUG nova.compute.manager [req-872754d3-2c4b-49c9-b888-b2bc492d234f req-28b6c94f-ec6a-47bc-95a4-cb1d0ffb4587 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Refreshing instance network info cache due to event network-changed-7f9cebec-50e0-428c-a19c-f39af2719a65. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1166.443398] env[62383]: DEBUG oslo_concurrency.lockutils [req-872754d3-2c4b-49c9-b888-b2bc492d234f req-28b6c94f-ec6a-47bc-95a4-cb1d0ffb4587 service nova] Acquiring lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1166.443398] env[62383]: DEBUG oslo_concurrency.lockutils [req-872754d3-2c4b-49c9-b888-b2bc492d234f req-28b6c94f-ec6a-47bc-95a4-cb1d0ffb4587 service nova] Acquired lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.444024] env[62383]: DEBUG nova.network.neutron [req-872754d3-2c4b-49c9-b888-b2bc492d234f req-28b6c94f-ec6a-47bc-95a4-cb1d0ffb4587 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Refreshing network info cache for port 7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1166.451042] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452481, 'name': CreateVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.598947] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1166.598947] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.599151] env[62383]: DEBUG nova.network.neutron [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1166.909346] env[62383]: INFO nova.compute.resource_tracker [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating resource usage from migration ed4c80d6-347b-468f-9849-4295e19c0d1c [ 1166.947592] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452481, 'name': CreateVM_Task, 'duration_secs': 0.336055} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.949667] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1166.950614] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1166.950796] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.951177] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1166.957218] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b97ae51a-1f10-49cb-86c0-01bc1b8b12ff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.958743] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1166.958743] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52964905-406f-26af-7786-ecc621e1c8be" [ 1166.958743] env[62383]: _type = "Task" [ 1166.958743] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.967020] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52964905-406f-26af-7786-ecc621e1c8be, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.094516] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3e7f0d-8b57-4d05-bc51-08293160fa0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.105575] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979785f9-1574-4aaf-acc2-46a12f665711 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.138480] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4aa02c-d695-4077-98a6-d860300a511b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.141985] env[62383]: DEBUG nova.network.neutron [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1167.150177] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a12bf00-d564-496d-b7af-e62f8547b826 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.165577] env[62383]: DEBUG nova.compute.provider_tree [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.175885] env[62383]: DEBUG nova.network.neutron [req-872754d3-2c4b-49c9-b888-b2bc492d234f req-28b6c94f-ec6a-47bc-95a4-cb1d0ffb4587 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updated VIF entry in instance network info cache for port 7f9cebec-50e0-428c-a19c-f39af2719a65. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1167.176255] env[62383]: DEBUG nova.network.neutron [req-872754d3-2c4b-49c9-b888-b2bc492d234f req-28b6c94f-ec6a-47bc-95a4-cb1d0ffb4587 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updating instance_info_cache with network_info: [{"id": "7f9cebec-50e0-428c-a19c-f39af2719a65", "address": "fa:16:3e:57:77:97", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9cebec-50", "ovs_interfaceid": "7f9cebec-50e0-428c-a19c-f39af2719a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.243567] env[62383]: DEBUG nova.network.neutron [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance_info_cache with network_info: [{"id": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "address": "fa:16:3e:67:44:eb", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a7f4f55-9c", "ovs_interfaceid": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.315087] env[62383]: DEBUG nova.network.neutron [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating instance_info_cache 
with network_info: [{"id": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "address": "fa:16:3e:b4:77:63", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d1962c-c0", "ovs_interfaceid": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.469771] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52964905-406f-26af-7786-ecc621e1c8be, 'name': SearchDatastore_Task, 'duration_secs': 0.009952} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.470109] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1167.470109] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1167.470276] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1167.470426] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.470606] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1167.470864] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4475b3cc-c8ff-4a0d-8d03-562ad1acb467 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.478969] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1167.479077] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1167.480292] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0ad5cce-0c1d-48f4-897b-06446834ce5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.485550] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1167.485550] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b813a3-e9f6-94b6-4903-da63c8c02de4" [ 1167.485550] env[62383]: _type = "Task" [ 1167.485550] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.493655] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b813a3-e9f6-94b6-4903-da63c8c02de4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.671202] env[62383]: DEBUG nova.scheduler.client.report [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1167.678454] env[62383]: DEBUG oslo_concurrency.lockutils [req-872754d3-2c4b-49c9-b888-b2bc492d234f req-28b6c94f-ec6a-47bc-95a4-cb1d0ffb4587 service nova] Releasing lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1167.746272] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1167.817654] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1167.818014] env[62383]: DEBUG nova.compute.manager [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Instance network_info: |[{"id": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "address": "fa:16:3e:b4:77:63", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d1962c-c0", "ovs_interfaceid": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1167.818470] env[62383]: DEBUG 
nova.virt.vmwareapi.vmops [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:77:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7908211b-df93-467b-87a8-3c3d29b03de6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1d1962c-c0f2-4e5b-9885-b71019f7e792', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1167.826088] env[62383]: DEBUG oslo.service.loopingcall [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1167.826614] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1167.826849] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74568edf-7823-4b5b-aea9-36cc750ffbdc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.846606] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1167.846606] env[62383]: value = "task-2452485" [ 1167.846606] env[62383]: _type = "Task" [ 1167.846606] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.854055] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452485, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.960406] env[62383]: DEBUG nova.compute.manager [req-7d98a9b8-db92-4af3-be33-218b637defbe req-1cf887fb-849b-47d8-9ba3-cf42a7043132 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received event network-changed-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1167.960644] env[62383]: DEBUG nova.compute.manager [req-7d98a9b8-db92-4af3-be33-218b637defbe req-1cf887fb-849b-47d8-9ba3-cf42a7043132 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Refreshing instance network info cache due to event network-changed-f1d1962c-c0f2-4e5b-9885-b71019f7e792. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1167.960895] env[62383]: DEBUG oslo_concurrency.lockutils [req-7d98a9b8-db92-4af3-be33-218b637defbe req-1cf887fb-849b-47d8-9ba3-cf42a7043132 service nova] Acquiring lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1167.961102] env[62383]: DEBUG oslo_concurrency.lockutils [req-7d98a9b8-db92-4af3-be33-218b637defbe req-1cf887fb-849b-47d8-9ba3-cf42a7043132 service nova] Acquired lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.961306] env[62383]: DEBUG nova.network.neutron [req-7d98a9b8-db92-4af3-be33-218b637defbe req-1cf887fb-849b-47d8-9ba3-cf42a7043132 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Refreshing network info cache for port f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1167.996289] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b813a3-e9f6-94b6-4903-da63c8c02de4, 'name': SearchDatastore_Task, 'duration_secs': 0.007763} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.997057] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-990234b4-0c6c-43dd-a664-5aa0ba016553 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.002319] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1168.002319] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52798758-945f-4fb9-9721-96ff8ac56eaa" [ 1168.002319] env[62383]: _type = "Task" [ 1168.002319] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.009492] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52798758-945f-4fb9-9721-96ff8ac56eaa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.175996] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.290s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.176184] env[62383]: INFO nova.compute.manager [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Migrating [ 1168.357557] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452485, 'name': CreateVM_Task, 'duration_secs': 0.343979} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.357750] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1168.358455] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.358629] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.358955] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1168.359222] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27d3d819-e094-4dcb-8f01-dac29da5a334 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.363609] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1168.363609] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]5239f8bf-ed46-42a6-6172-5aee8415b1f7" [ 1168.363609] env[62383]: _type = "Task" [ 1168.363609] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.370887] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5239f8bf-ed46-42a6-6172-5aee8415b1f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.512072] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52798758-945f-4fb9-9721-96ff8ac56eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.00931} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.512378] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1168.512587] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a4e3b5a2-98c2-4376-bafd-49ccee64b262/a4e3b5a2-98c2-4376-bafd-49ccee64b262.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1168.512817] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f485453-88c9-4900-a40d-5a21e8322e70 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.519222] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1168.519222] env[62383]: value = "task-2452486" [ 1168.519222] env[62383]: _type = "Task" [ 1168.519222] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.526615] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.664274] env[62383]: DEBUG nova.network.neutron [req-7d98a9b8-db92-4af3-be33-218b637defbe req-1cf887fb-849b-47d8-9ba3-cf42a7043132 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updated VIF entry in instance network info cache for port f1d1962c-c0f2-4e5b-9885-b71019f7e792. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1168.664844] env[62383]: DEBUG nova.network.neutron [req-7d98a9b8-db92-4af3-be33-218b637defbe req-1cf887fb-849b-47d8-9ba3-cf42a7043132 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating instance_info_cache with network_info: [{"id": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "address": "fa:16:3e:b4:77:63", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d1962c-c0", "ovs_interfaceid": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.690956] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.691190] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.691524] env[62383]: DEBUG nova.network.neutron [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1168.876417] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]5239f8bf-ed46-42a6-6172-5aee8415b1f7, 'name': SearchDatastore_Task, 'duration_secs': 0.012046} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.876754] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1168.877045] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1168.877259] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.877412] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.877601] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1168.877909] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7bbb361c-0e55-41ed-b319-d342a426918e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.893465] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1168.893709] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1168.894870] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a1ec29a-a348-4654-8c2b-b80614a2eaff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.901586] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1168.901586] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b1659f-f42c-93c9-5a68-9679e13dc4bf" [ 1168.901586] env[62383]: _type = "Task" [ 1168.901586] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.910823] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b1659f-f42c-93c9-5a68-9679e13dc4bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.030053] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452486, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460144} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.030221] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a4e3b5a2-98c2-4376-bafd-49ccee64b262/a4e3b5a2-98c2-4376-bafd-49ccee64b262.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1169.030356] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1169.030823] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36db9f5e-41d5-4432-93ee-983a629212da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.037920] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1169.037920] env[62383]: value = "task-2452487" [ 1169.037920] env[62383]: _type = "Task" [ 1169.037920] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.046482] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452487, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.167687] env[62383]: DEBUG oslo_concurrency.lockutils [req-7d98a9b8-db92-4af3-be33-218b637defbe req-1cf887fb-849b-47d8-9ba3-cf42a7043132 service nova] Releasing lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.261100] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e69b06c-f54e-4381-a6d7-3037fe1034f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.283513] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance '12843fba-0240-44fb-9687-d34a6333011b' progress to 0 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1169.412976] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b1659f-f42c-93c9-5a68-9679e13dc4bf, 'name': SearchDatastore_Task, 'duration_secs': 0.05063} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.413863] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a30268d5-265f-41ee-9f05-996631747a4b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.419925] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1169.419925] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52962503-a453-b310-d396-31f9b0c91b5b" [ 1169.419925] env[62383]: _type = "Task" [ 1169.419925] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.429657] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52962503-a453-b310-d396-31f9b0c91b5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.438879] env[62383]: DEBUG nova.network.neutron [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance_info_cache with network_info: [{"id": "cbb2fb29-a758-4def-88a8-416db1bb8301", "address": "fa:16:3e:8a:55:fa", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbb2fb29-a7", "ovs_interfaceid": "cbb2fb29-a758-4def-88a8-416db1bb8301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.550385] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452487, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068856} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.550838] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1169.551593] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcad1aa-8ea8-4936-9ac9-a8b7ef178a98 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.576481] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] a4e3b5a2-98c2-4376-bafd-49ccee64b262/a4e3b5a2-98c2-4376-bafd-49ccee64b262.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1169.576833] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ef00aea-4422-4a6a-b514-39b15c647ecf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.595991] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1169.595991] env[62383]: value = "task-2452488" [ 1169.595991] env[62383]: _type = "Task" [ 1169.595991] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.604317] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452488, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.793517] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1169.793517] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-397fefa7-2d64-4d15-b76c-8ec72dc994c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.801202] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1169.801202] env[62383]: value = "task-2452489" [ 1169.801202] env[62383]: _type = "Task" [ 1169.801202] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.811392] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452489, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.933178] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52962503-a453-b310-d396-31f9b0c91b5b, 'name': SearchDatastore_Task, 'duration_secs': 0.011887} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.933674] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.933959] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] b8e512cd-5eb9-423c-9447-833e34909bc3/b8e512cd-5eb9-423c-9447-833e34909bc3.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1169.934561] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39f5274c-752f-40d0-ad68-16f29e3ecd4f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.941886] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.947659] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1169.947659] env[62383]: value = "task-2452490" [ 1169.947659] env[62383]: _type = "Task" [ 1169.947659] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.960045] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452490, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.106560] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452488, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.314368] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452489, 'name': PowerOffVM_Task, 'duration_secs': 0.289055} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.314771] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1170.315092] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance '12843fba-0240-44fb-9687-d34a6333011b' progress to 17 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1170.461933] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452490, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.609536] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452488, 'name': ReconfigVM_Task, 'duration_secs': 0.687846} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.610017] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Reconfigured VM instance instance-00000075 to attach disk [datastore2] a4e3b5a2-98c2-4376-bafd-49ccee64b262/a4e3b5a2-98c2-4376-bafd-49ccee64b262.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1170.610405] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32141f29-f1cf-42f2-91ba-a7f2a9fd8d3e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.618676] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1170.618676] env[62383]: value = "task-2452492" [ 1170.618676] env[62383]: _type = "Task" [ 1170.618676] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.629386] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452492, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.824258] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1170.824519] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1170.824691] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1170.824928] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1170.825159] env[62383]: DEBUG 
nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1170.825391] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1170.825607] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1170.825859] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1170.826092] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1170.826278] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1170.826482] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1170.832758] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91cd4967-dedf-4c6c-8989-550ea4f3eca4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.854817] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1170.854817] env[62383]: value = "task-2452493" [ 1170.854817] env[62383]: _type = "Task" [ 1170.854817] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.869931] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452493, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.963388] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452490, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.129342] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452492, 'name': Rename_Task, 'duration_secs': 0.348941} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.129633] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1171.129949] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56749162-cb01-4ec2-8f54-9a2f7e9d5201 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.136240] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1171.136240] env[62383]: value = "task-2452494" [ 1171.136240] env[62383]: _type = "Task" [ 1171.136240] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.145806] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452494, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.365402] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452493, 'name': ReconfigVM_Task, 'duration_secs': 0.380413} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.365743] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance '12843fba-0240-44fb-9687-d34a6333011b' progress to 33 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1171.461435] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452490, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.035249} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.461685] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] b8e512cd-5eb9-423c-9447-833e34909bc3/b8e512cd-5eb9-423c-9447-833e34909bc3.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1171.461893] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1171.462158] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-201cf39e-d510-4fe4-86ed-e61d5425a888 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.467302] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028ffa43-35a1-4637-87a6-52ab37fc3972 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.470685] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1171.470685] env[62383]: value = "task-2452495" [ 1171.470685] env[62383]: _type = "Task" [ 1171.470685] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.486663] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance 'b6ed40a8-674f-4179-8642-848ab0a2d31b' progress to 0 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1171.494392] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452495, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.648250] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452494, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.872877] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1171.873134] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1171.873292] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1171.873484] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1171.873638] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1171.873787] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1171.873990] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1171.874174] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1171.874353] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Got 1 possible topologies 
{{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1171.874508] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1171.874682] env[62383]: DEBUG nova.virt.hardware [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1171.879912] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1171.880227] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97fc5f02-a972-423b-a1a7-0986710f6cfa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.897833] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1171.897833] env[62383]: value = "task-2452496" [ 1171.897833] env[62383]: _type = "Task" [ 1171.897833] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.905396] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452496, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.981959] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064315} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.982261] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1171.983035] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd66bd12-ce35-4998-9faa-f62a3ac9a3de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.998183] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1172.007693] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] b8e512cd-5eb9-423c-9447-833e34909bc3/b8e512cd-5eb9-423c-9447-833e34909bc3.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1172.007960] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51477402-a476-430b-9f85-e695c2899bab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.009526] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67ad12c8-eb68-4f21-9fd1-1f7afa699979 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.028450] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1172.028450] env[62383]: value = "task-2452497" [ 1172.028450] env[62383]: _type = "Task" [ 1172.028450] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.032398] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1172.032398] env[62383]: value = "task-2452498" [ 1172.032398] env[62383]: _type = "Task" [ 1172.032398] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.041052] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452498, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.043941] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452497, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.147204] env[62383]: DEBUG oslo_vmware.api [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452494, 'name': PowerOnVM_Task, 'duration_secs': 0.536783} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.147519] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1172.147847] env[62383]: INFO nova.compute.manager [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Took 8.80 seconds to spawn the instance on the hypervisor. [ 1172.148082] env[62383]: DEBUG nova.compute.manager [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1172.148951] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd6901c-d936-4412-b4ca-d6aaacbf734d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.407449] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452496, 'name': ReconfigVM_Task, 'duration_secs': 0.167375} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.407731] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1172.408514] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb34d73-08df-4fe2-a531-030d1dc6e35d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.430101] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] volume-24d05613-a175-4448-bd5a-122c9a2e08ac/volume-24d05613-a175-4448-bd5a-122c9a2e08ac.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1172.430391] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd656da6-4d51-4fe0-84d6-fcc0218029f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.448457] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1172.448457] env[62383]: value = "task-2452500" [ 1172.448457] env[62383]: _type = "Task" [ 1172.448457] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.456455] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452500, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.539444] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452497, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.544249] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452498, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.669771] env[62383]: INFO nova.compute.manager [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Took 13.58 seconds to build instance. 
[ 1172.960565] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452500, 'name': ReconfigVM_Task, 'duration_secs': 0.296827} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.960838] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Reconfigured VM instance instance-00000073 to attach disk [datastore2] volume-24d05613-a175-4448-bd5a-122c9a2e08ac/volume-24d05613-a175-4448-bd5a-122c9a2e08ac.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1172.961107] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance '12843fba-0240-44fb-9687-d34a6333011b' progress to 50 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1173.013292] env[62383]: DEBUG nova.compute.manager [req-d8531c71-32fc-49e9-90ad-cf0abe4db2f2 req-37f6235b-fc40-476b-b38b-dd8fd7ae2410 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received event network-changed-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1173.013292] env[62383]: DEBUG nova.compute.manager [req-d8531c71-32fc-49e9-90ad-cf0abe4db2f2 req-37f6235b-fc40-476b-b38b-dd8fd7ae2410 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing instance network info cache due to event network-changed-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1173.013292] env[62383]: DEBUG oslo_concurrency.lockutils [req-d8531c71-32fc-49e9-90ad-cf0abe4db2f2 req-37f6235b-fc40-476b-b38b-dd8fd7ae2410 service nova] Acquiring lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1173.013292] env[62383]: DEBUG oslo_concurrency.lockutils [req-d8531c71-32fc-49e9-90ad-cf0abe4db2f2 req-37f6235b-fc40-476b-b38b-dd8fd7ae2410 service nova] Acquired lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.013292] env[62383]: DEBUG nova.network.neutron [req-d8531c71-32fc-49e9-90ad-cf0abe4db2f2 req-37f6235b-fc40-476b-b38b-dd8fd7ae2410 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing network info cache for port 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1173.042879] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452497, 'name': PowerOffVM_Task, 'duration_secs': 0.585615} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.045934] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1173.046350] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance 'b6ed40a8-674f-4179-8642-848ab0a2d31b' progress to 17 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1173.049687] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452498, 'name': ReconfigVM_Task, 'duration_secs': 0.771396} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.051064] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Reconfigured VM instance instance-00000076 to attach disk [datastore2] b8e512cd-5eb9-423c-9447-833e34909bc3/b8e512cd-5eb9-423c-9447-833e34909bc3.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1173.051621] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48cc7d8d-d5e8-4d40-aaec-0ca05204aa91 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.058510] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1173.058510] env[62383]: value = "task-2452501" [ 1173.058510] env[62383]: _type = "Task" [ 1173.058510] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.067773] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452501, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.172373] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a9713cc4-2c8f-454c-b94d-54fcc0b9024f tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.096s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.467953] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e844545d-8da1-48e1-a258-4f149946cbd7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.491025] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54db5267-66cd-47e1-a439-9cd0d8093d5e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.509774] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance '12843fba-0240-44fb-9687-d34a6333011b' progress to 67 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1173.553493] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1173.553787] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1173.553957] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1173.554415] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1173.554538] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 
tempest-DeleteServersTestJSON-465293751-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1173.554947] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1173.554947] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1173.555261] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1173.555452] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1173.555689] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1173.555915] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1173.562094] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4479189-7209-44bb-be49-cebb11b28df1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.583121] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452501, 'name': Rename_Task, 'duration_secs': 0.166213} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.584440] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1173.584814] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1173.584814] env[62383]: value = "task-2452502" [ 1173.584814] env[62383]: _type = "Task" [ 1173.584814] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.585069] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-878fa35f-2665-45e2-8b28-2c98a4451993 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.595142] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452502, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.598465] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1173.598465] env[62383]: value = "task-2452503" [ 1173.598465] env[62383]: _type = "Task" [ 1173.598465] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.608937] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452503, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.741601] env[62383]: DEBUG nova.network.neutron [req-d8531c71-32fc-49e9-90ad-cf0abe4db2f2 req-37f6235b-fc40-476b-b38b-dd8fd7ae2410 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updated VIF entry in instance network info cache for port 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1173.742019] env[62383]: DEBUG nova.network.neutron [req-d8531c71-32fc-49e9-90ad-cf0abe4db2f2 req-37f6235b-fc40-476b-b38b-dd8fd7ae2410 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [{"id": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "address": "fa:16:3e:25:d6:e3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633f7cbf-4b", "ovs_interfaceid": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.097062] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452502, 'name': ReconfigVM_Task, 'duration_secs': 0.133522} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.097483] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance 'b6ed40a8-674f-4179-8642-848ab0a2d31b' progress to 33 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1174.113307] env[62383]: DEBUG oslo_vmware.api [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452503, 'name': PowerOnVM_Task, 'duration_secs': 0.478151} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.113307] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1174.113307] env[62383]: INFO nova.compute.manager [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Took 8.46 seconds to spawn the instance on the hypervisor. 
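[editor's note] The 'Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>"' records surrounding the network-changed events above and below follow oslo.concurrency's locking pattern: take the per-instance cache lock, refresh the port's network info, then release. A minimal sketch of that pattern follows; refresh_port_info() and the UUID argument are hypothetical placeholders, not Nova's actual call path.

```python
# Hedged sketch of the oslo.concurrency locking pattern behind the
# refresh_cache lock records in this log; the callable is a placeholder.
from oslo_concurrency import lockutils


def refresh_instance_cache(instance_uuid, refresh_port_info):
    # lockutils.lock() is a context manager; with logging enabled it emits
    # Acquiring/Acquired/Releasing DEBUG lines like the ones seen here.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh_port_info()
```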
[ 1174.113307] env[62383]: DEBUG nova.compute.manager [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1174.113770] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65cf523-ea19-4172-b2b5-77903871ee70 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.245031] env[62383]: DEBUG oslo_concurrency.lockutils [req-d8531c71-32fc-49e9-90ad-cf0abe4db2f2 req-37f6235b-fc40-476b-b38b-dd8fd7ae2410 service nova] Releasing lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1174.466982] env[62383]: DEBUG nova.compute.manager [req-2cbfe09c-f23b-4b16-b0ba-09a72f1f7957 req-fcf21c6f-f628-4a58-964b-929fc941ea00 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Received event network-changed-7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1174.466982] env[62383]: DEBUG nova.compute.manager [req-2cbfe09c-f23b-4b16-b0ba-09a72f1f7957 req-fcf21c6f-f628-4a58-964b-929fc941ea00 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Refreshing instance network info cache due to event network-changed-7f9cebec-50e0-428c-a19c-f39af2719a65. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1174.466982] env[62383]: DEBUG oslo_concurrency.lockutils [req-2cbfe09c-f23b-4b16-b0ba-09a72f1f7957 req-fcf21c6f-f628-4a58-964b-929fc941ea00 service nova] Acquiring lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1174.466982] env[62383]: DEBUG oslo_concurrency.lockutils [req-2cbfe09c-f23b-4b16-b0ba-09a72f1f7957 req-fcf21c6f-f628-4a58-964b-929fc941ea00 service nova] Acquired lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.466982] env[62383]: DEBUG nova.network.neutron [req-2cbfe09c-f23b-4b16-b0ba-09a72f1f7957 req-fcf21c6f-f628-4a58-964b-929fc941ea00 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Refreshing network info cache for port 7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1174.609056] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False 
{{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1174.609285] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1174.609529] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1174.609825] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1174.610140] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1174.610445] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1174.610792] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1174.611294] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1174.611583] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1174.611900] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1174.612259] env[62383]: DEBUG nova.virt.hardware [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1174.621682] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f650b847-6fd2-4e80-8268-572206d4bece 
tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Reconfiguring VM instance instance-00000074 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1174.622337] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6c6b9eb-ddd2-489a-98af-7f97bc6f0db0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.645348] env[62383]: INFO nova.compute.manager [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Took 15.56 seconds to build instance. [ 1174.652020] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1174.652020] env[62383]: value = "task-2452505" [ 1174.652020] env[62383]: _type = "Task" [ 1174.652020] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.660301] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452505, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.056250] env[62383]: DEBUG nova.compute.manager [req-7e523676-78c7-4733-94e3-faf00b1126f7 req-560b460f-ff5b-49c4-9d6b-662807b531d7 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Received event network-changed-7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1175.056708] env[62383]: DEBUG nova.compute.manager [req-7e523676-78c7-4733-94e3-faf00b1126f7 req-560b460f-ff5b-49c4-9d6b-662807b531d7 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Refreshing instance network info cache due to event network-changed-7f9cebec-50e0-428c-a19c-f39af2719a65. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1175.056869] env[62383]: DEBUG oslo_concurrency.lockutils [req-7e523676-78c7-4733-94e3-faf00b1126f7 req-560b460f-ff5b-49c4-9d6b-662807b531d7 service nova] Acquiring lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1175.150293] env[62383]: DEBUG oslo_concurrency.lockutils [None req-302f00dc-fe27-463e-95fe-25424af79857 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.070s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.160200] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452505, 'name': ReconfigVM_Task, 'duration_secs': 0.476} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.160455] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Reconfigured VM instance instance-00000074 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1175.161268] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec50455-e811-42fc-84ad-a7ddd783ce7c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.186907] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] b6ed40a8-674f-4179-8642-848ab0a2d31b/b6ed40a8-674f-4179-8642-848ab0a2d31b.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1175.188109] env[62383]: DEBUG nova.network.neutron [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Port 6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1175.192435] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-050c3a52-2d38-4adc-bce3-e882a7fd4f08 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.215183] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1175.215183] env[62383]: value = "task-2452506" [ 1175.215183] env[62383]: _type = "Task" [ 1175.215183] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.227095] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452506, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.274637] env[62383]: DEBUG nova.network.neutron [req-2cbfe09c-f23b-4b16-b0ba-09a72f1f7957 req-fcf21c6f-f628-4a58-964b-929fc941ea00 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updated VIF entry in instance network info cache for port 7f9cebec-50e0-428c-a19c-f39af2719a65. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1175.275109] env[62383]: DEBUG nova.network.neutron [req-2cbfe09c-f23b-4b16-b0ba-09a72f1f7957 req-fcf21c6f-f628-4a58-964b-929fc941ea00 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updating instance_info_cache with network_info: [{"id": "7f9cebec-50e0-428c-a19c-f39af2719a65", "address": "fa:16:3e:57:77:97", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9cebec-50", "ovs_interfaceid": "7f9cebec-50e0-428c-a19c-f39af2719a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1175.726355] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452506, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.778103] env[62383]: DEBUG oslo_concurrency.lockutils [req-2cbfe09c-f23b-4b16-b0ba-09a72f1f7957 req-fcf21c6f-f628-4a58-964b-929fc941ea00 service nova] Releasing lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1175.778607] env[62383]: DEBUG oslo_concurrency.lockutils [req-7e523676-78c7-4733-94e3-faf00b1126f7 req-560b460f-ff5b-49c4-9d6b-662807b531d7 service nova] Acquired lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.778831] env[62383]: DEBUG nova.network.neutron [req-7e523676-78c7-4733-94e3-faf00b1126f7 req-560b460f-ff5b-49c4-9d6b-662807b531d7 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Refreshing network info cache for port 7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1176.230423] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "12843fba-0240-44fb-9687-d34a6333011b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1176.230709] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "12843fba-0240-44fb-9687-d34a6333011b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1176.230974] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "12843fba-0240-44fb-9687-d34a6333011b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1176.238346] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452506, 'name': ReconfigVM_Task, 'duration_secs': 0.656103} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.238628] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Reconfigured VM instance instance-00000074 to attach disk [datastore2] b6ed40a8-674f-4179-8642-848ab0a2d31b/b6ed40a8-674f-4179-8642-848ab0a2d31b.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1176.239196] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance 'b6ed40a8-674f-4179-8642-848ab0a2d31b' progress to 50 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1176.482889] env[62383]: DEBUG nova.network.neutron [req-7e523676-78c7-4733-94e3-faf00b1126f7 req-560b460f-ff5b-49c4-9d6b-662807b531d7 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updated VIF entry in instance network info cache for port 7f9cebec-50e0-428c-a19c-f39af2719a65. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1176.483383] env[62383]: DEBUG nova.network.neutron [req-7e523676-78c7-4733-94e3-faf00b1126f7 req-560b460f-ff5b-49c4-9d6b-662807b531d7 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updating instance_info_cache with network_info: [{"id": "7f9cebec-50e0-428c-a19c-f39af2719a65", "address": "fa:16:3e:57:77:97", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9cebec-50", "ovs_interfaceid": "7f9cebec-50e0-428c-a19c-f39af2719a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.496042] env[62383]: DEBUG nova.compute.manager [req-2f24d18c-a630-4118-a6cb-74cbea9fe2ba req-9d8f76c1-f008-4d77-a23d-fab256845a7e service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received event network-changed-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1176.496290] env[62383]: DEBUG nova.compute.manager [req-2f24d18c-a630-4118-a6cb-74cbea9fe2ba req-9d8f76c1-f008-4d77-a23d-fab256845a7e service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing instance network info cache due to event 
network-changed-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1176.496564] env[62383]: DEBUG oslo_concurrency.lockutils [req-2f24d18c-a630-4118-a6cb-74cbea9fe2ba req-9d8f76c1-f008-4d77-a23d-fab256845a7e service nova] Acquiring lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1176.496743] env[62383]: DEBUG oslo_concurrency.lockutils [req-2f24d18c-a630-4118-a6cb-74cbea9fe2ba req-9d8f76c1-f008-4d77-a23d-fab256845a7e service nova] Acquired lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.496945] env[62383]: DEBUG nova.network.neutron [req-2f24d18c-a630-4118-a6cb-74cbea9fe2ba req-9d8f76c1-f008-4d77-a23d-fab256845a7e service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing network info cache for port 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1176.745847] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79712c30-a00c-465d-87a0-28172ff07e38 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.768731] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6995692-d720-49c4-8ca7-4183e5ff7256 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.787142] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance 'b6ed40a8-674f-4179-8642-848ab0a2d31b' progress to 67 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1176.986090] env[62383]: DEBUG oslo_concurrency.lockutils [req-7e523676-78c7-4733-94e3-faf00b1126f7 req-560b460f-ff5b-49c4-9d6b-662807b531d7 service nova] Releasing lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.082717] env[62383]: DEBUG nova.compute.manager [req-3d1e936f-8e9f-4199-943b-35e36deded49 req-9b139f01-24d7-47c0-87be-24f13c475a14 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received event network-changed-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1177.082934] env[62383]: DEBUG nova.compute.manager [req-3d1e936f-8e9f-4199-943b-35e36deded49 req-9b139f01-24d7-47c0-87be-24f13c475a14 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Refreshing instance network info cache due to event network-changed-f1d1962c-c0f2-4e5b-9885-b71019f7e792. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1177.083146] env[62383]: DEBUG oslo_concurrency.lockutils [req-3d1e936f-8e9f-4199-943b-35e36deded49 req-9b139f01-24d7-47c0-87be-24f13c475a14 service nova] Acquiring lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1177.083340] env[62383]: DEBUG oslo_concurrency.lockutils [req-3d1e936f-8e9f-4199-943b-35e36deded49 req-9b139f01-24d7-47c0-87be-24f13c475a14 service nova] Acquired lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.083530] env[62383]: DEBUG nova.network.neutron [req-3d1e936f-8e9f-4199-943b-35e36deded49 req-9b139f01-24d7-47c0-87be-24f13c475a14 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Refreshing network info cache for port f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1177.287218] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1177.287444] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.287596] env[62383]: DEBUG nova.network.neutron [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1177.323980] env[62383]: DEBUG nova.network.neutron [req-2f24d18c-a630-4118-a6cb-74cbea9fe2ba req-9d8f76c1-f008-4d77-a23d-fab256845a7e service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updated VIF entry in instance network info cache for port 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1177.325031] env[62383]: DEBUG nova.network.neutron [req-2f24d18c-a630-4118-a6cb-74cbea9fe2ba req-9d8f76c1-f008-4d77-a23d-fab256845a7e service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [{"id": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "address": "fa:16:3e:25:d6:e3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633f7cbf-4b", "ovs_interfaceid": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.339767] env[62383]: DEBUG nova.network.neutron [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Port cbb2fb29-a758-4def-88a8-416db1bb8301 binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1177.826859] env[62383]: DEBUG oslo_concurrency.lockutils [req-2f24d18c-a630-4118-a6cb-74cbea9fe2ba req-9d8f76c1-f008-4d77-a23d-fab256845a7e service nova] Releasing lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1177.895433] env[62383]: DEBUG nova.network.neutron [req-3d1e936f-8e9f-4199-943b-35e36deded49 req-9b139f01-24d7-47c0-87be-24f13c475a14 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updated VIF entry in instance network info cache for port f1d1962c-c0f2-4e5b-9885-b71019f7e792. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1177.895857] env[62383]: DEBUG nova.network.neutron [req-3d1e936f-8e9f-4199-943b-35e36deded49 req-9b139f01-24d7-47c0-87be-24f13c475a14 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating instance_info_cache with network_info: [{"id": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "address": "fa:16:3e:b4:77:63", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d1962c-c0", "ovs_interfaceid": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.035614] env[62383]: DEBUG nova.network.neutron [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance_info_cache with network_info: [{"id": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "address": "fa:16:3e:67:44:eb", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a7f4f55-9c", "ovs_interfaceid": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.223655] env[62383]: DEBUG nova.compute.manager [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 
f7584d2c-5add-4764-9aed-22f7d1674854] Stashing vm_state: active {{(pid=62383) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1178.366317] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "b6ed40a8-674f-4179-8642-848ab0a2d31b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.366576] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.366714] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.398606] env[62383]: DEBUG oslo_concurrency.lockutils [req-3d1e936f-8e9f-4199-943b-35e36deded49 req-9b139f01-24d7-47c0-87be-24f13c475a14 service nova] Releasing lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.540883] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1178.745068] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.745345] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.051375] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30dfeaa-0505-43e1-a00f-05a378e21716 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.059292] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9396f3dc-c902-4b86-8375-60c0366f1df5 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.250427] env[62383]: INFO nova.compute.claims [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1179.406130] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.406397] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.406500] env[62383]: DEBUG nova.network.neutron [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.756918] env[62383]: INFO nova.compute.resource_tracker [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating resource usage from migration fc9ebb27-6dc4-471c-b07f-8614cc1d8654 [ 1179.892338] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e855d5b-f669-4514-85d0-8587c6b5a73f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.900310] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b8e95c-35aa-4388-a5c8-f64dae1ef1d9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.933009] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c75eea-99a0-4c3c-928f-358ff61f0d71 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.940173] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d27ec5c-a3f0-4e62-beac-97986c150fe5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.953327] env[62383]: DEBUG nova.compute.provider_tree [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.114780] env[62383]: DEBUG nova.network.neutron [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating 
instance_info_cache with network_info: [{"id": "cbb2fb29-a758-4def-88a8-416db1bb8301", "address": "fa:16:3e:8a:55:fa", "network": {"id": "2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbb2fb29-a7", "ovs_interfaceid": "cbb2fb29-a758-4def-88a8-416db1bb8301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.152164] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6907dd62-6b4c-4d8f-9d46-7e2dea66b1e0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.171539] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509eeb67-92fe-4a25-8b34-544c3efed187 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.178956] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance '12843fba-0240-44fb-9687-d34a6333011b' progress to 83 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1180.456738] env[62383]: DEBUG nova.scheduler.client.report [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1180.618040] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1180.685382] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 
tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1180.685702] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71f681c2-fc2b-4f8e-ad3a-fabf8756cdc7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.693661] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1180.693661] env[62383]: value = "task-2452509" [ 1180.693661] env[62383]: _type = "Task" [ 1180.693661] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.701825] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452509, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.962105] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.217s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.962285] env[62383]: INFO nova.compute.manager [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Migrating [ 1181.138520] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0996b58d-18c5-4138-8567-d6d684c348c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.159012] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb93c98c-53e9-4bce-9236-a7474023a7be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.164868] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance 'b6ed40a8-674f-4179-8642-848ab0a2d31b' progress to 83 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1181.205079] env[62383]: DEBUG oslo_vmware.api [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452509, 'name': PowerOnVM_Task, 'duration_secs': 0.392738} completed successfully. 
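The PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver starts the vCenter task, then repeatedly polls it (the "progress is 0%" lines) until it reports success and a duration. A minimal self-contained sketch of that loop is below; the Task class and its progress values are invented stand-ins for illustration, not the real oslo.vmware API.

    import time


    class Task:
        """Stand-in for a vCenter task handle such as 'task-2452509'."""

        def __init__(self, task_id, progress_steps):
            self.task_id = task_id
            self._steps = iter(progress_steps)  # successive progress readings

        def poll(self):
            """Return (state, progress); 'success' once progress reaches 100."""
            progress = next(self._steps)
            return ("success" if progress >= 100 else "running"), progress


    def wait_for_task(task, interval=0.5):
        """Poll until the task completes, logging progress like the entries above."""
        start = time.monotonic()
        while True:
            state, progress = task.poll()
            print(f"Task {task.task_id}: {progress}% ({state})")
            if state == "success":
                return time.monotonic() - start
            time.sleep(interval)


    if __name__ == "__main__":
        secs = wait_for_task(Task("task-2452509", [0, 45, 100]), interval=0.01)
        print(f"completed successfully, duration_secs={secs:.3f}")
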
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.205079] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1181.205306] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f0bc7fa6-7db1-4584-b69e-d5d58ec3a51c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance '12843fba-0240-44fb-9687-d34a6333011b' progress to 100 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1181.478052] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.478430] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.478430] env[62383]: DEBUG nova.network.neutron [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1181.672059] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1181.672059] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8bf09e7-642d-4be1-a520-16ec6a6df529 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.681149] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1181.681149] env[62383]: value = "task-2452510" [ 1181.681149] env[62383]: _type = "Task" [ 1181.681149] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.689806] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452510, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.191951] env[62383]: DEBUG oslo_vmware.api [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452510, 'name': PowerOnVM_Task, 'duration_secs': 0.503233} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.192901] env[62383]: DEBUG nova.network.neutron [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance_info_cache with network_info: [{"id": "fbc86f1d-2da8-4092-baac-7867624b1100", "address": "fa:16:3e:59:29:5c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbc86f1d-2d", "ovs_interfaceid": "fbc86f1d-2da8-4092-baac-7867624b1100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.194143] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1182.194351] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f650b847-6fd2-4e80-8268-572206d4bece tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance 'b6ed40a8-674f-4179-8642-848ab0a2d31b' progress to 100 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1182.704208] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.953866] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "12843fba-0240-44fb-9687-d34a6333011b" by 
"nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.954165] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "12843fba-0240-44fb-9687-d34a6333011b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.954390] env[62383]: DEBUG nova.compute.manager [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Going to confirm migration 8 {{(pid=62383) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1183.517095] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1183.517339] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquired lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.517605] env[62383]: DEBUG nova.network.neutron [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1183.517821] env[62383]: DEBUG nova.objects.instance [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lazy-loading 'info_cache' on Instance uuid 12843fba-0240-44fb-9687-d34a6333011b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1184.228577] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02024f3e-473c-4611-a452-31627c97da76 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.248730] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance 'f7584d2c-5add-4764-9aed-22f7d1674854' progress to 0 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1184.735551] env[62383]: DEBUG nova.network.neutron [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance_info_cache with network_info: [{"id": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "address": 
"fa:16:3e:67:44:eb", "network": {"id": "fc95187d-c7e5-41bd-80c5-9c086b9bc87b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-619976327-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aead8ea1d1de4d0d8d8c07dec519d8b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a7f4f55-9c", "ovs_interfaceid": "6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.755184] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1184.755733] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29f647ec-8f26-44ae-8084-573bdf230ceb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.764350] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1184.764350] env[62383]: value = "task-2452513" [ 1184.764350] env[62383]: _type = "Task" [ 1184.764350] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.775728] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452513, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.955088] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "b6ed40a8-674f-4179-8642-848ab0a2d31b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.955088] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.955296] env[62383]: DEBUG nova.compute.manager [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Going to confirm migration 9 {{(pid=62383) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1185.237996] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Releasing lock "refresh_cache-12843fba-0240-44fb-9687-d34a6333011b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1185.238379] env[62383]: DEBUG nova.objects.instance [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lazy-loading 'migration_context' on Instance uuid 12843fba-0240-44fb-9687-d34a6333011b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1185.274925] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452513, 'name': PowerOffVM_Task, 'duration_secs': 0.22111} completed successfully. 
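The recurring "Acquiring lock ... by ...", "acquired ... waited Xs" and '"released" ... held Ys' triplets come from oslo.concurrency's named locks wrapped around critical sections such as confirm_resize. Below is a rough stand-alone approximation of that acquire/hold/release bookkeeping using a plain threading.Lock; logged_lock is an illustrative helper, not the real lockutils internals.

    import contextlib
    import threading
    import time

    _locks = {}                        # lock name -> threading.Lock
    _registry_guard = threading.Lock()


    @contextlib.contextmanager
    def logged_lock(name, caller):
        """Acquire a named lock and log waited/held times like the entries above."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        t0 = time.monotonic()
        lock.acquire()
        t1 = time.monotonic()
        print(f'Lock "{name}" acquired by "{caller}" :: waited {t1 - t0:.3f}s')
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: '
                  f'held {time.monotonic() - t1:.3f}s')


    if __name__ == "__main__":
        with logged_lock("b6ed40a8-674f-4179-8642-848ab0a2d31b",
                         "ComputeManager.confirm_resize"):
            time.sleep(0.05)           # the critical section (confirm the resize)
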
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.275217] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1185.275404] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance 'f7584d2c-5add-4764-9aed-22f7d1674854' progress to 17 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1185.514788] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.514983] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquired lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.515181] env[62383]: DEBUG nova.network.neutron [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1185.515367] env[62383]: DEBUG nova.objects.instance [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lazy-loading 'info_cache' on Instance uuid b6ed40a8-674f-4179-8642-848ab0a2d31b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1185.740987] env[62383]: DEBUG nova.objects.base [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Object Instance<12843fba-0240-44fb-9687-d34a6333011b> lazy-loaded attributes: info_cache,migration_context {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1185.742055] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a256e65a-6ce3-49d7-880e-1c646f3fa9c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.763822] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f37453c-1338-47e9-b734-172bf09c5036 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.770030] env[62383]: DEBUG oslo_vmware.api [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1185.770030] env[62383]: value = 
"session[526c6062-9206-ac03-b2da-fd469a7c1551]528b8f05-91cc-f8ff-cb36-88dbd5c2d640" [ 1185.770030] env[62383]: _type = "Task" [ 1185.770030] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.778270] env[62383]: DEBUG oslo_vmware.api [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528b8f05-91cc-f8ff-cb36-88dbd5c2d640, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.781302] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:37Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1185.781533] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1185.781692] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1185.781872] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1185.782033] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1185.782190] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1185.782404] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1185.782558] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1185.782723] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1185.782883] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1185.783992] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1185.788281] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a670e341-6107-4c81-9bf5-80c097cbee21 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.805073] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1185.805073] env[62383]: value = "task-2452514" [ 1185.805073] env[62383]: _type = "Task" [ 1185.805073] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.814257] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452514, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.280345] env[62383]: DEBUG oslo_vmware.api [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]528b8f05-91cc-f8ff-cb36-88dbd5c2d640, 'name': SearchDatastore_Task, 'duration_secs': 0.011655} completed successfully. 
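The nova.virt.hardware lines above walk the CPU-topology selection for the 1-vCPU m1.micro flavor: with no flavor or image limits and no preference, the enumeration finds exactly one candidate, 1 socket x 1 core x 1 thread. Below is a toy reconstruction of that enumeration under those simplifying assumptions; it ignores image properties and NUMA, so it is not Nova's actual _get_possible_cpu_topologies.

    from collections import namedtuple
    from itertools import product

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield every sockets*cores*threads split that exactly covers `vcpus`."""
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield VirtCPUTopology(s, c, t)


    if __name__ == "__main__":
        topos = list(possible_topologies(1))
        print(f"Got {len(topos)} possible topologies: {topos}")
        # -> Got 1 possible topologies: [VirtCPUTopology(sockets=1, cores=1, threads=1)]
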
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.280806] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.280899] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.316773] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452514, 'name': ReconfigVM_Task, 'duration_secs': 0.172604} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.317069] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance 'f7584d2c-5add-4764-9aed-22f7d1674854' progress to 33 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1186.714036] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "interface-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-66cdcb95-1241-47bf-829d-bbcea0032500" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1186.714294] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-66cdcb95-1241-47bf-829d-bbcea0032500" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1186.714687] env[62383]: DEBUG nova.objects.instance [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'flavor' on Instance uuid ac4e173d-fec9-4a0f-b9b6-ad83a98989e7 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1186.735919] env[62383]: DEBUG nova.network.neutron [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance_info_cache with network_info: [{"id": "cbb2fb29-a758-4def-88a8-416db1bb8301", "address": "fa:16:3e:8a:55:fa", "network": {"id": 
"2918b29f-5664-4fd2-b8ca-ac94c1407373", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-101746888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c15955328966463fa09401a270d95fe0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcbb2fb29-a7", "ovs_interfaceid": "cbb2fb29-a758-4def-88a8-416db1bb8301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.823365] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1186.823605] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1186.823762] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1186.823944] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1186.824110] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1186.824259] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1186.824461] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1186.824626] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1186.824784] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1186.824971] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1186.825122] env[62383]: DEBUG nova.virt.hardware [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1186.830415] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1186.832935] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af0e0471-1f1a-48d5-a344-a9e7fc25941c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.853160] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1186.853160] env[62383]: value = "task-2452515" [ 1186.853160] env[62383]: _type = "Task" [ 1186.853160] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.863896] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452515, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.951393] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779a648e-1670-4b87-b170-e2a5e8b50ee8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.959460] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bd3b2d-28fe-4a11-812c-7814449287a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.990786] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f6d048-e62e-43d2-ab1e-c50cf7118948 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.998452] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf23a8f-0d81-42b1-a0fa-ce9704233b6f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.013964] env[62383]: DEBUG nova.compute.provider_tree [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.238213] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Releasing lock "refresh_cache-b6ed40a8-674f-4179-8642-848ab0a2d31b" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.238467] env[62383]: DEBUG nova.objects.instance [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lazy-loading 'migration_context' on Instance uuid b6ed40a8-674f-4179-8642-848ab0a2d31b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.305143] env[62383]: DEBUG nova.objects.instance [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'pci_requests' on Instance uuid ac4e173d-fec9-4a0f-b9b6-ad83a98989e7 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1187.363732] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452515, 'name': ReconfigVM_Task, 'duration_secs': 0.211027} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.363824] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1187.364612] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a23cb0-b8bf-4963-b4f3-ec43ef91f6c7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.386641] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] f7584d2c-5add-4764-9aed-22f7d1674854/f7584d2c-5add-4764-9aed-22f7d1674854.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1187.386891] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbfb992c-c465-4aa4-9b9f-968c259957e4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.406061] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1187.406061] env[62383]: value = "task-2452516" [ 1187.406061] env[62383]: _type = "Task" [ 1187.406061] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.413623] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452516, 'name': ReconfigVM_Task} progress is 5%. 
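The two ReconfigVM_Task calls above (task-2452515 and task-2452516) perform the resize's disk swap: the first spec detaches the old root disk (disk 2000) while keeping its backing file, the second attaches the migrated, thin-provisioned VMDK from datastore2. Below is a schematic sketch of the two specs using plain dicts; the real driver builds pyVmomi vim.vm.ConfigSpec objects, and the field names here are only illustrative.

    def detach_disk_spec(disk_key):
        """Spec that removes an existing virtual disk but keeps its backing file."""
        return {"deviceChange": [{"operation": "remove",
                                  "device": {"key": disk_key}}]}


    def attach_disk_spec(vmdk_path, disk_type="thin", controller_key=1000, unit_number=0):
        """Spec that adds a disk backed by an existing VMDK on the datastore."""
        return {"deviceChange": [{
            "operation": "add",
            "device": {
                "controllerKey": controller_key,
                "unitNumber": unit_number,
                "backing": {"fileName": vmdk_path,
                            "thinProvisioned": disk_type == "thin",
                            "diskMode": "persistent"},
            },
        }]}


    if __name__ == "__main__":
        print(detach_disk_spec(2000))
        print(attach_disk_spec(
            "[datastore2] f7584d2c-5add-4764-9aed-22f7d1674854/"
            "f7584d2c-5add-4764-9aed-22f7d1674854.vmdk"))
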
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.517454] env[62383]: DEBUG nova.scheduler.client.report [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1187.744295] env[62383]: DEBUG nova.objects.base [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1187.745283] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2691bf9f-7536-4516-bd0f-2546bfd00024 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.764669] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e39a039-b2fe-4078-9598-40eabaecb6b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.770044] env[62383]: DEBUG oslo_vmware.api [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1187.770044] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a5e2d2-d2a4-3f4f-77ad-a1fe4be8a283" [ 1187.770044] env[62383]: _type = "Task" [ 1187.770044] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.777621] env[62383]: DEBUG oslo_vmware.api [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a5e2d2-d2a4-3f4f-77ad-a1fe4be8a283, 'name': SearchDatastore_Task} progress is 0%. 
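The "Inventory has not changed for provider ..." entries reflect the report client comparing its freshly computed inventory against what it believes Placement already holds, and skipping the update when they match. Below is a small sketch of that comparison; needs_update is a made-up helper, and the dict mirrors the VCPU/MEMORY_MB/DISK_GB inventory shown in the log.

    local_inventory = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 145,
                    "step_size": 1, "allocation_ratio": 1.0},
    }


    def needs_update(cached, fresh):
        """True only when a resource class or any of its fields differ."""
        return cached != fresh


    if __name__ == "__main__":
        cached = {rc: dict(fields) for rc, fields in local_inventory.items()}
        if needs_update(cached, local_inventory):
            print("updating inventory for provider "
                  "60615f54-0557-436e-a486-87505bffb4c7")
        else:
            print("Inventory has not changed for provider "
                  "60615f54-0557-436e-a486-87505bffb4c7; skipping update")
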
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.807739] env[62383]: DEBUG nova.objects.base [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1187.807961] env[62383]: DEBUG nova.network.neutron [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1187.874843] env[62383]: DEBUG nova.policy [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7825487398fc47b5aa690bed357e4448', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba559fb5da01474791c2408ca92bbff6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1187.917249] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452516, 'name': ReconfigVM_Task, 'duration_secs': 0.256655} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.917530] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Reconfigured VM instance instance-00000070 to attach disk [datastore2] f7584d2c-5add-4764-9aed-22f7d1674854/f7584d2c-5add-4764-9aed-22f7d1674854.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1187.917805] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance 'f7584d2c-5add-4764-9aed-22f7d1674854' progress to 50 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1188.282314] env[62383]: DEBUG oslo_vmware.api [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a5e2d2-d2a4-3f4f-77ad-a1fe4be8a283, 'name': SearchDatastore_Task, 'duration_secs': 0.007408} completed successfully. 
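The nova.policy entry above shows the network:attach_external_network check failing for a request that only carries the 'reader' and 'member' roles, so the interface attach proceeds on a tenant network rather than an external one. Below is a toy illustration of that outcome, assuming the default rule effectively requires admin; this is a simplification, not oslo.policy, and attach_external_network_allowed is an invented name.

    credentials = {"roles": ["reader", "member"], "is_admin": False}


    def attach_external_network_allowed(creds):
        """Hypothetical stand-in for an admin-only policy rule."""
        return creds.get("is_admin", False) or "admin" in creds.get("roles", [])


    if __name__ == "__main__":
        if attach_external_network_allowed(credentials):
            print("Policy check for network:attach_external_network passed")
        else:
            print("Policy check for network:attach_external_network failed "
                  "with credentials", credentials)
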
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.282540] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1188.425807] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54f5794-1c44-4e0a-b670-b2ab3f22fd38 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.446560] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92323b2c-1973-4d00-9c4f-0fe9df9a70da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.465365] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance 'f7584d2c-5add-4764-9aed-22f7d1674854' progress to 67 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1188.527591] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.247s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1188.531057] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.248s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1189.004989] env[62383]: DEBUG nova.network.neutron [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Port fbc86f1d-2da8-4092-baac-7867624b1100 binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1189.085110] env[62383]: INFO nova.scheduler.client.report [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted allocation for migration a68e057a-f5d0-4a72-90a1-add3b56e984a [ 1189.185521] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1a64d1-2503-4756-a1b7-19c2bc621bd3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.194708] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c716a03-40e0-4eb3-a938-10edd3508ceb {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.227599] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503282d1-bb55-4007-9f17-ffab35159769 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.236407] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5fd006-da81-4a84-8616-bcbf4c68aa09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.251125] env[62383]: DEBUG nova.compute.provider_tree [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.320279] env[62383]: DEBUG nova.network.neutron [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Successfully updated port: 66cdcb95-1241-47bf-829d-bbcea0032500 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1189.590669] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e04f1fd1-fb55-4bcf-af03-4ae63949afd9 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "12843fba-0240-44fb-9687-d34a6333011b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.636s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1189.757457] env[62383]: DEBUG nova.scheduler.client.report [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1189.823950] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.824171] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.824357] env[62383]: DEBUG nova.network.neutron [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 
tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1190.028028] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "f7584d2c-5add-4764-9aed-22f7d1674854-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1190.028286] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "f7584d2c-5add-4764-9aed-22f7d1674854-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1190.028286] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "f7584d2c-5add-4764-9aed-22f7d1674854-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1190.363479] env[62383]: WARNING nova.network.neutron [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] ce80ab32-a193-40db-be36-b8085e20a4c5 already exists in list: networks containing: ['ce80ab32-a193-40db-be36-b8085e20a4c5']. 
ignoring it [ 1190.637523] env[62383]: DEBUG nova.network.neutron [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [{"id": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "address": "fa:16:3e:25:d6:e3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633f7cbf-4b", "ovs_interfaceid": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "66cdcb95-1241-47bf-829d-bbcea0032500", "address": "fa:16:3e:a9:d7:47", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cdcb95-12", "ovs_interfaceid": "66cdcb95-1241-47bf-829d-bbcea0032500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.768280] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.238s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1191.060204] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" 
{{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.060325] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.060492] env[62383]: DEBUG nova.network.neutron [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1191.139928] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1191.140535] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1191.140715] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.141603] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20184dd3-6362-406b-b532-4a035c863a4f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.159563] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1191.159782] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1191.159942] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 
tempest-AttachInterfacesTestJSON-112066379-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1191.160141] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1191.160292] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1191.160440] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1191.160642] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1191.163161] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1191.163161] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1191.163161] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1191.163161] env[62383]: DEBUG nova.virt.hardware [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1191.167515] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Reconfiguring VM to attach interface {{(pid=62383) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1191.167797] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2dac04b4-4d12-42b0-841c-51460df67505 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.185638] env[62383]: DEBUG oslo_vmware.api [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1191.185638] env[62383]: value = "task-2452517" [ 1191.185638] env[62383]: _type = "Task" [ 1191.185638] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.193408] env[62383]: DEBUG oslo_vmware.api [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452517, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.319508] env[62383]: INFO nova.scheduler.client.report [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted allocation for migration ed4c80d6-347b-468f-9849-4295e19c0d1c [ 1191.697326] env[62383]: DEBUG oslo_vmware.api [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452517, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.807799] env[62383]: DEBUG nova.network.neutron [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance_info_cache with network_info: [{"id": "fbc86f1d-2da8-4092-baac-7867624b1100", "address": "fa:16:3e:59:29:5c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbc86f1d-2d", "ovs_interfaceid": "fbc86f1d-2da8-4092-baac-7867624b1100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.825263] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.870s {{(pid=62383) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.196738] env[62383]: DEBUG oslo_vmware.api [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452517, 'name': ReconfigVM_Task, 'duration_secs': 0.700442} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.197276] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.197497] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Reconfigured VM to attach interface {{(pid=62383) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1192.311061] env[62383]: DEBUG oslo_concurrency.lockutils [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.702637] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e80678d-8210-4850-a731-97ec2a94c600 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-66cdcb95-1241-47bf-829d-bbcea0032500" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 5.988s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.834318] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8c39eb-b6dc-4d23-9f49-7adc89be43a3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.855452] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac3a49e-d5b6-470a-8995-3b62598eb079 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.863278] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance 'f7584d2c-5add-4764-9aed-22f7d1674854' progress to 83 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1192.978730] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "b6ed40a8-674f-4179-8642-848ab0a2d31b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.979259] env[62383]: DEBUG 
oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.979597] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "b6ed40a8-674f-4179-8642-848ab0a2d31b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.979818] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.980039] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.982309] env[62383]: INFO nova.compute.manager [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Terminating instance [ 1193.111423] env[62383]: DEBUG nova.compute.manager [req-8fcfc329-284d-4e8e-99d1-f7ae26641beb req-bcca57ec-0e3f-42fe-9928-0cc73062a0c9 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received event network-vif-plugged-66cdcb95-1241-47bf-829d-bbcea0032500 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1193.111648] env[62383]: DEBUG oslo_concurrency.lockutils [req-8fcfc329-284d-4e8e-99d1-f7ae26641beb req-bcca57ec-0e3f-42fe-9928-0cc73062a0c9 service nova] Acquiring lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.111853] env[62383]: DEBUG oslo_concurrency.lockutils [req-8fcfc329-284d-4e8e-99d1-f7ae26641beb req-bcca57ec-0e3f-42fe-9928-0cc73062a0c9 service nova] Lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.112035] env[62383]: DEBUG oslo_concurrency.lockutils [req-8fcfc329-284d-4e8e-99d1-f7ae26641beb req-bcca57ec-0e3f-42fe-9928-0cc73062a0c9 service nova] Lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.112209] env[62383]: DEBUG nova.compute.manager [req-8fcfc329-284d-4e8e-99d1-f7ae26641beb req-bcca57ec-0e3f-42fe-9928-0cc73062a0c9 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] No waiting events found dispatching network-vif-plugged-66cdcb95-1241-47bf-829d-bbcea0032500 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1193.112375] env[62383]: WARNING nova.compute.manager [req-8fcfc329-284d-4e8e-99d1-f7ae26641beb req-bcca57ec-0e3f-42fe-9928-0cc73062a0c9 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received unexpected event network-vif-plugged-66cdcb95-1241-47bf-829d-bbcea0032500 for instance with vm_state active and task_state None. [ 1193.372763] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1193.373560] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50447e99-e063-4afc-8d81-28f2476b52d9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.382824] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1193.382824] env[62383]: value = "task-2452518" [ 1193.382824] env[62383]: _type = "Task" [ 1193.382824] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.392364] env[62383]: INFO nova.compute.manager [None req-ef26baaf-f9db-480a-97bb-185f81b5b29e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Get console output [ 1193.392767] env[62383]: WARNING nova.virt.vmwareapi.driver [None req-ef26baaf-f9db-480a-97bb-185f81b5b29e tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] The console log is missing. Check your VSPC configuration [ 1193.396899] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452518, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.475224] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Acquiring lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.475464] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.486919] env[62383]: DEBUG nova.compute.manager [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1193.487118] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1193.488239] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3292b7fe-8250-4f31-8cda-1d3a4ad8333a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.498039] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.498374] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27f4f659-eb82-44a7-af28-702c0188b179 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.505694] env[62383]: DEBUG oslo_vmware.api [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1193.505694] env[62383]: value = "task-2452519" [ 1193.505694] env[62383]: _type = "Task" [ 1193.505694] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.515663] env[62383]: DEBUG oslo_vmware.api [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452519, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.893824] env[62383]: DEBUG oslo_vmware.api [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452518, 'name': PowerOnVM_Task, 'duration_secs': 0.391765} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.894118] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1193.894309] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-07202987-e293-4549-a278-cca9b065d80b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance 'f7584d2c-5add-4764-9aed-22f7d1674854' progress to 100 {{(pid=62383) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1193.916072] env[62383]: DEBUG oslo_concurrency.lockutils [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "interface-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-66cdcb95-1241-47bf-829d-bbcea0032500" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.916325] env[62383]: DEBUG oslo_concurrency.lockutils [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-66cdcb95-1241-47bf-829d-bbcea0032500" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.977810] env[62383]: DEBUG nova.compute.manager [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1194.017938] env[62383]: DEBUG oslo_vmware.api [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452519, 'name': PowerOffVM_Task, 'duration_secs': 0.273056} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.018212] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1194.018379] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1194.018620] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80dc1a68-5f9f-4db5-9640-71b26cb5c338 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.090813] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1194.091107] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1194.091310] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleting the datastore file [datastore2] b6ed40a8-674f-4179-8642-848ab0a2d31b {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1194.091606] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65e27a8b-481e-43ca-b57f-98bd02b0794d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.099179] env[62383]: DEBUG oslo_vmware.api [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for the task: (returnval){ [ 1194.099179] env[62383]: value = "task-2452521" [ 1194.099179] env[62383]: _type = "Task" [ 1194.099179] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.107860] env[62383]: DEBUG oslo_vmware.api [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452521, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.384122] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "89033750-629f-4ddb-a309-56d50f798a8d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.384415] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.419037] env[62383]: DEBUG oslo_concurrency.lockutils [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1194.419261] env[62383]: DEBUG oslo_concurrency.lockutils [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.420295] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d239dfb-9b4c-4dd2-b686-171231e043da {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.438960] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f335c247-ead1-45d6-880e-c5c0ca4c8a63 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.464828] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Reconfiguring VM to detach interface {{(pid=62383) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1194.465126] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecb97779-ee79-4a58-ad00-9889a660e192 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.488322] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1194.488322] env[62383]: value = "task-2452522" [ 1194.488322] env[62383]: _type = "Task" [ 1194.488322] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.497344] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.501103] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.501103] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.502640] env[62383]: INFO nova.compute.claims [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1194.613444] env[62383]: DEBUG oslo_vmware.api [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Task: {'id': task-2452521, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173638} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.613685] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1194.613886] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1194.614075] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1194.614254] env[62383]: INFO nova.compute.manager [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1194.614495] env[62383]: DEBUG oslo.service.loopingcall [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1194.614705] env[62383]: DEBUG nova.compute.manager [-] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1194.614800] env[62383]: DEBUG nova.network.neutron [-] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1194.888018] env[62383]: DEBUG nova.compute.utils [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1194.998017] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.146318] env[62383]: DEBUG nova.compute.manager [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received event network-changed-66cdcb95-1241-47bf-829d-bbcea0032500 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1195.146525] env[62383]: DEBUG nova.compute.manager [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing instance network info cache due to event network-changed-66cdcb95-1241-47bf-829d-bbcea0032500. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1195.146742] env[62383]: DEBUG oslo_concurrency.lockutils [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] Acquiring lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1195.146924] env[62383]: DEBUG oslo_concurrency.lockutils [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] Acquired lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.147112] env[62383]: DEBUG nova.network.neutron [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing network info cache for port 66cdcb95-1241-47bf-829d-bbcea0032500 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1195.390704] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1195.418541] env[62383]: DEBUG nova.network.neutron [-] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.499514] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.675802] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebe24dd-795e-4178-939b-b0aced915feb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.684978] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b0de93-9a93-4671-a4ab-cb466e22afac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.719021] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3baf00d-4aa4-4f33-8688-565c5be2c8b5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.727164] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fbfe2c-3137-44d7-a459-992c5e5990e0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.741604] env[62383]: DEBUG nova.compute.provider_tree [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.911498] env[62383]: DEBUG nova.network.neutron [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updated VIF entry in instance network info cache for port 66cdcb95-1241-47bf-829d-bbcea0032500. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1195.912045] env[62383]: DEBUG nova.network.neutron [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [{"id": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "address": "fa:16:3e:25:d6:e3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633f7cbf-4b", "ovs_interfaceid": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "66cdcb95-1241-47bf-829d-bbcea0032500", "address": "fa:16:3e:a9:d7:47", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cdcb95-12", "ovs_interfaceid": "66cdcb95-1241-47bf-829d-bbcea0032500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.920377] env[62383]: INFO nova.compute.manager [-] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Took 1.31 seconds to deallocate network for instance. [ 1196.000265] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.009465] env[62383]: DEBUG nova.network.neutron [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Port fbc86f1d-2da8-4092-baac-7867624b1100 binding to destination host cpu-1 is already ACTIVE {{(pid=62383) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1196.009727] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.009903] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.010117] env[62383]: DEBUG nova.network.neutron [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1196.247594] env[62383]: DEBUG nova.scheduler.client.report [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1196.414628] env[62383]: DEBUG oslo_concurrency.lockutils [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] Releasing lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.414995] env[62383]: DEBUG nova.compute.manager [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Received event network-vif-deleted-cbb2fb29-a758-4def-88a8-416db1bb8301 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1196.415204] env[62383]: INFO nova.compute.manager [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Neutron deleted interface cbb2fb29-a758-4def-88a8-416db1bb8301; detaching it from the instance and deleting it from the info cache [ 1196.415385] env[62383]: DEBUG nova.network.neutron [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] 
[instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.427517] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.461758] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "89033750-629f-4ddb-a309-56d50f798a8d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.462107] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.462371] env[62383]: INFO nova.compute.manager [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Attaching volume 7056d27b-468a-48c9-b79c-0a478eafadfc to /dev/sdb [ 1196.495436] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152a54e1-85bf-4913-addb-6db5c6f57993 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.504712] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.506847] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639ddc6c-5ad3-4d17-b46b-018ed526ecf6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.524673] env[62383]: DEBUG nova.virt.block_device [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Updating existing volume attachment record: 897e716d-c86e-499c-b17c-086863894da8 {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1196.752890] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.753444] env[62383]: DEBUG nova.compute.manager [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1196.756807] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.329s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.757080] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.793726] env[62383]: INFO nova.scheduler.client.report [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Deleted allocations for instance b6ed40a8-674f-4179-8642-848ab0a2d31b [ 1196.917789] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e48648b0-63fc-4dc3-ac30-82022c125f6f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.928611] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11a6b4a-28ad-4eea-a422-5f635cc432e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.962377] env[62383]: DEBUG nova.compute.manager [req-e6ac19d0-bb17-4387-b225-75ac9d6cc9f9 req-8ab87ccb-ff60-42d2-b1c4-5e9a771b664e service nova] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Detach interface failed, port_id=cbb2fb29-a758-4def-88a8-416db1bb8301, reason: Instance 
b6ed40a8-674f-4179-8642-848ab0a2d31b could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1196.976241] env[62383]: DEBUG nova.network.neutron [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance_info_cache with network_info: [{"id": "fbc86f1d-2da8-4092-baac-7867624b1100", "address": "fa:16:3e:59:29:5c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbc86f1d-2d", "ovs_interfaceid": "fbc86f1d-2da8-4092-baac-7867624b1100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.000361] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.261916] env[62383]: DEBUG nova.compute.utils [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1197.263384] env[62383]: DEBUG nova.compute.manager [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1197.263560] env[62383]: DEBUG nova.network.neutron [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1197.299559] env[62383]: DEBUG nova.policy [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6a4eb3a382944bc8556b4a080c9b38f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd895c57c68f2427fb5da3b4c2866b0c4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1197.309194] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d269a66f-dc04-482d-95da-2d49fd3c9291 tempest-DeleteServersTestJSON-465293751 tempest-DeleteServersTestJSON-465293751-project-member] Lock "b6ed40a8-674f-4179-8642-848ab0a2d31b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.330s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.478525] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.500851] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.558487] env[62383]: DEBUG nova.network.neutron [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Successfully created port: 58a9319d-b343-4caf-904d-91af9410d121 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1197.767470] env[62383]: DEBUG nova.compute.manager [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1197.981656] env[62383]: DEBUG nova.compute.manager [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62383) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1197.982060] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.982139] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.002694] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.272560] env[62383]: INFO nova.virt.block_device [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Booting with volume 6e38b46a-eb2c-4e2f-a981-9969c1d37f07 at /dev/sda [ 1198.302616] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-decbe5e7-901d-4abf-97ee-41a36a9e0a09 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.313025] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02fa590d-d92b-4817-a020-441fe8f8b90f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.344252] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e40252c-b57b-4407-9d4e-5b5393dfb0aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.352590] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb51df9-21bc-4cea-a573-353fba79a7a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.383844] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ffdc15-f524-4d83-b189-8ae175d13a0c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.390502] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b520d34b-2adc-42dd-931c-ed91d26ec082 
{{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.404193] env[62383]: DEBUG nova.virt.block_device [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Updating existing volume attachment record: f6cac44b-5a3d-4aaf-aad2-072974059dac {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1198.485321] env[62383]: DEBUG nova.objects.instance [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'migration_context' on Instance uuid f7584d2c-5add-4764-9aed-22f7d1674854 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1198.502333] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.966035] env[62383]: DEBUG nova.compute.manager [req-0d8035f7-1e0b-4ec8-9840-620f6ca1a6a7 req-a06f134c-d8f5-4f2f-9749-28c5cf9a3bd9 service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Received event network-vif-plugged-58a9319d-b343-4caf-904d-91af9410d121 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1198.966273] env[62383]: DEBUG oslo_concurrency.lockutils [req-0d8035f7-1e0b-4ec8-9840-620f6ca1a6a7 req-a06f134c-d8f5-4f2f-9749-28c5cf9a3bd9 service nova] Acquiring lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1198.966481] env[62383]: DEBUG oslo_concurrency.lockutils [req-0d8035f7-1e0b-4ec8-9840-620f6ca1a6a7 req-a06f134c-d8f5-4f2f-9749-28c5cf9a3bd9 service nova] Lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1198.966645] env[62383]: DEBUG oslo_concurrency.lockutils [req-0d8035f7-1e0b-4ec8-9840-620f6ca1a6a7 req-a06f134c-d8f5-4f2f-9749-28c5cf9a3bd9 service nova] Lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1198.966807] env[62383]: DEBUG nova.compute.manager [req-0d8035f7-1e0b-4ec8-9840-620f6ca1a6a7 req-a06f134c-d8f5-4f2f-9749-28c5cf9a3bd9 service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] No waiting events found dispatching network-vif-plugged-58a9319d-b343-4caf-904d-91af9410d121 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1198.966969] env[62383]: WARNING nova.compute.manager [req-0d8035f7-1e0b-4ec8-9840-620f6ca1a6a7 req-a06f134c-d8f5-4f2f-9749-28c5cf9a3bd9 service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Received unexpected event network-vif-plugged-58a9319d-b343-4caf-904d-91af9410d121 for instance with vm_state building and task_state block_device_mapping. 
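The repeated lock triplets in the entries above ("Acquiring lock ... by ...", "acquired ... :: waited", '"released" ... :: held', logged from lockutils.py:402/407/421) are emitted by oslo.concurrency's wrapper around a decorated callable. A minimal sketch of that pattern follows; the lock name and function are hypothetical placeholders, not Nova's own code.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('instance-events-demo')
    def pop_event():
        # Runs only while the named in-process lock is held; lockutils logs the
        # acquire/wait and release/held timings around this call at DEBUG level,
        # which is what produces the triplets seen in the log entries above.
        return 'network-vif-plugged'

    if __name__ == '__main__':
        print(pop_event())

With DEBUG logging enabled for oslo_concurrency, calling pop_event() yields the same acquire/release bookkeeping lines as the per-instance "-events" lock above.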
[ 1199.003989] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.053935] env[62383]: DEBUG nova.network.neutron [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Successfully updated port: 58a9319d-b343-4caf-904d-91af9410d121 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1199.116227] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502f7941-ea00-4466-8fd0-7cc8333636cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.124116] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0029ef89-4949-4fe2-95a0-572fc9a71495 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.154199] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f125dddb-5e0a-4eaa-8c0d-e9d029a59c97 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.161365] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-210da38c-7096-4d54-8140-7d18913bd74b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.176265] env[62383]: DEBUG nova.compute.provider_tree [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.503787] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.559413] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Acquiring lock "refresh_cache-991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1199.559647] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Acquired lock "refresh_cache-991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.559776] env[62383]: DEBUG nova.network.neutron [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1199.679623] env[62383]: DEBUG nova.scheduler.client.report [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1200.004405] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.091115] env[62383]: DEBUG nova.network.neutron [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1200.230703] env[62383]: DEBUG nova.network.neutron [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Updating instance_info_cache with network_info: [{"id": "58a9319d-b343-4caf-904d-91af9410d121", "address": "fa:16:3e:69:75:d7", "network": {"id": "e4b16357-c5f6-4500-a798-3f730c27a39a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-860166877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d895c57c68f2427fb5da3b4c2866b0c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a9319d-b3", "ovs_interfaceid": "58a9319d-b343-4caf-904d-91af9410d121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.485663] env[62383]: DEBUG nova.compute.manager [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1200.486344] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1200.486627] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1200.486797] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1200.486990] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1200.487156] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1200.487308] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1200.487515] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1200.487676] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1200.487846] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Got 1 
possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1200.488039] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1200.488267] env[62383]: DEBUG nova.virt.hardware [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1200.489176] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a0b833-92d5-4a34-b8ba-feb29e884a87 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.501965] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79d3180-ed8c-4efc-b141-e24df3e0a406 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.512245] env[62383]: DEBUG oslo_vmware.api [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452522, 'name': ReconfigVM_Task, 'duration_secs': 5.783393} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.520360] env[62383]: DEBUG oslo_concurrency.lockutils [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.520589] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Reconfigured VM to detach interface {{(pid=62383) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1200.691812] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.710s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.733553] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Releasing lock "refresh_cache-991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.733721] env[62383]: DEBUG nova.compute.manager [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 
991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Instance network_info: |[{"id": "58a9319d-b343-4caf-904d-91af9410d121", "address": "fa:16:3e:69:75:d7", "network": {"id": "e4b16357-c5f6-4500-a798-3f730c27a39a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-860166877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d895c57c68f2427fb5da3b4c2866b0c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a9319d-b3", "ovs_interfaceid": "58a9319d-b343-4caf-904d-91af9410d121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1200.735615] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:75:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4345ef6-a7c8-4c1c-badf-a0d4f578b61c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '58a9319d-b343-4caf-904d-91af9410d121', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1200.743233] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Creating folder: Project (d895c57c68f2427fb5da3b4c2866b0c4). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1200.744146] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa5f16f7-b715-461b-99d8-99b0452c08cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.761096] env[62383]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1200.761406] env[62383]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62383) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1200.762203] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Folder already exists: Project (d895c57c68f2427fb5da3b4c2866b0c4). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1200.762441] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Creating folder: Instances. Parent ref: group-v496611. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1200.762707] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1dd7ce7-5f19-44dd-9f67-edd1dea7ba0f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.774712] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Created folder: Instances in parent group-v496611. [ 1200.774976] env[62383]: DEBUG oslo.service.loopingcall [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1200.775195] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1200.775404] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5a2469f-5c2f-4cd0-a296-e5f969690676 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.797076] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1200.797076] env[62383]: value = "task-2452528" [ 1200.797076] env[62383]: _type = "Task" [ 1200.797076] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.806037] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452528, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.992665] env[62383]: DEBUG nova.compute.manager [req-387b18e8-4d8a-4b5c-b263-62a7e16d2aec req-0cd3955b-a15c-4c6f-ab25-f6f2d3850dab service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Received event network-changed-58a9319d-b343-4caf-904d-91af9410d121 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1200.992874] env[62383]: DEBUG nova.compute.manager [req-387b18e8-4d8a-4b5c-b263-62a7e16d2aec req-0cd3955b-a15c-4c6f-ab25-f6f2d3850dab service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Refreshing instance network info cache due to event network-changed-58a9319d-b343-4caf-904d-91af9410d121. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1200.993138] env[62383]: DEBUG oslo_concurrency.lockutils [req-387b18e8-4d8a-4b5c-b263-62a7e16d2aec req-0cd3955b-a15c-4c6f-ab25-f6f2d3850dab service nova] Acquiring lock "refresh_cache-991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1200.993292] env[62383]: DEBUG oslo_concurrency.lockutils [req-387b18e8-4d8a-4b5c-b263-62a7e16d2aec req-0cd3955b-a15c-4c6f-ab25-f6f2d3850dab service nova] Acquired lock "refresh_cache-991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.993454] env[62383]: DEBUG nova.network.neutron [req-387b18e8-4d8a-4b5c-b263-62a7e16d2aec req-0cd3955b-a15c-4c6f-ab25-f6f2d3850dab service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Refreshing network info cache for port 58a9319d-b343-4caf-904d-91af9410d121 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1201.074163] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Volume attach. Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1201.074163] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496616', 'volume_id': '7056d27b-468a-48c9-b79c-0a478eafadfc', 'name': 'volume-7056d27b-468a-48c9-b79c-0a478eafadfc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '89033750-629f-4ddb-a309-56d50f798a8d', 'attached_at': '', 'detached_at': '', 'volume_id': '7056d27b-468a-48c9-b79c-0a478eafadfc', 'serial': '7056d27b-468a-48c9-b79c-0a478eafadfc'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1201.074163] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c263e7d4-b1f9-44d3-b4bc-871da8538767 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.093966] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611c8c0e-bc05-4ff1-b59f-0c30749c1ab2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.122020] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] volume-7056d27b-468a-48c9-b79c-0a478eafadfc/volume-7056d27b-468a-48c9-b79c-0a478eafadfc.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1201.122020] env[62383]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8158dc3b-2c5e-4ffc-9967-fd7cc57d0329 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.140942] env[62383]: DEBUG oslo_vmware.api [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1201.140942] env[62383]: value = "task-2452529" [ 1201.140942] env[62383]: _type = "Task" [ 1201.140942] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.149496] env[62383]: DEBUG oslo_vmware.api [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452529, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.307738] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452528, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.653935] env[62383]: DEBUG oslo_vmware.api [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452529, 'name': ReconfigVM_Task, 'duration_secs': 0.436592} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.654783] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Reconfigured VM instance instance-00000071 to attach disk [datastore2] volume-7056d27b-468a-48c9-b79c-0a478eafadfc/volume-7056d27b-468a-48c9-b79c-0a478eafadfc.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1201.659802] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b154bd1e-a56f-4ac8-bdcd-a2ec9ef31ffb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.676553] env[62383]: DEBUG oslo_vmware.api [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1201.676553] env[62383]: value = "task-2452530" [ 1201.676553] env[62383]: _type = "Task" [ 1201.676553] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.685242] env[62383]: DEBUG oslo_vmware.api [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452530, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.754509] env[62383]: DEBUG nova.network.neutron [req-387b18e8-4d8a-4b5c-b263-62a7e16d2aec req-0cd3955b-a15c-4c6f-ab25-f6f2d3850dab service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Updated VIF entry in instance network info cache for port 58a9319d-b343-4caf-904d-91af9410d121. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1201.755190] env[62383]: DEBUG nova.network.neutron [req-387b18e8-4d8a-4b5c-b263-62a7e16d2aec req-0cd3955b-a15c-4c6f-ab25-f6f2d3850dab service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Updating instance_info_cache with network_info: [{"id": "58a9319d-b343-4caf-904d-91af9410d121", "address": "fa:16:3e:69:75:d7", "network": {"id": "e4b16357-c5f6-4500-a798-3f730c27a39a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-860166877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d895c57c68f2427fb5da3b4c2866b0c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a9319d-b3", "ovs_interfaceid": "58a9319d-b343-4caf-904d-91af9410d121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.809367] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452528, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.896601] env[62383]: DEBUG oslo_concurrency.lockutils [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1201.896798] env[62383]: DEBUG oslo_concurrency.lockutils [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.896980] env[62383]: DEBUG nova.network.neutron [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1202.187271] env[62383]: DEBUG oslo_vmware.api [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452530, 'name': ReconfigVM_Task, 'duration_secs': 0.147135} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.187592] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496616', 'volume_id': '7056d27b-468a-48c9-b79c-0a478eafadfc', 'name': 'volume-7056d27b-468a-48c9-b79c-0a478eafadfc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '89033750-629f-4ddb-a309-56d50f798a8d', 'attached_at': '', 'detached_at': '', 'volume_id': '7056d27b-468a-48c9-b79c-0a478eafadfc', 'serial': '7056d27b-468a-48c9-b79c-0a478eafadfc'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1202.248113] env[62383]: INFO nova.compute.manager [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Swapping old allocation on dict_keys(['60615f54-0557-436e-a486-87505bffb4c7']) held by migration fc9ebb27-6dc4-471c-b07f-8614cc1d8654 for instance [ 1202.259885] env[62383]: DEBUG oslo_concurrency.lockutils [req-387b18e8-4d8a-4b5c-b263-62a7e16d2aec req-0cd3955b-a15c-4c6f-ab25-f6f2d3850dab service nova] Releasing lock "refresh_cache-991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1202.270394] env[62383]: DEBUG nova.scheduler.client.report [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Overwriting current allocation {'allocations': {'60615f54-0557-436e-a486-87505bffb4c7': {'resources': {'DISK_GB': 
1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 177}}, 'project_id': 'e320302a6b1e466e887c787006413dec', 'user_id': 'e94f486c637c4b9f8c3cfa649688a603', 'consumer_generation': 1} on consumer f7584d2c-5add-4764-9aed-22f7d1674854 {{(pid=62383) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1202.313858] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452528, 'name': CreateVM_Task, 'duration_secs': 1.395759} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.314245] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1202.315126] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': 'f6cac44b-5a3d-4aaf-aad2-072974059dac', 'device_type': None, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496615', 'volume_id': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'name': 'volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '991d6eb0-94e7-4d7c-bd85-3c8ef28daa49', 'attached_at': '', 'detached_at': '', 'volume_id': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'serial': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07'}, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62383) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1202.315530] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Root volume attach. 
Driver type: vmdk {{(pid=62383) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1202.316571] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0d89da-05eb-46c8-a7aa-7fc6be3f7e9c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.325576] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b58e68-8568-46ad-93cb-05ef58b4703a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.333308] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0c359e-4a44-469a-b3f9-911dade69032 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.342239] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-7613e26d-0370-4eb7-bcca-69f242f716a4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.356068] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for the task: (returnval){ [ 1202.356068] env[62383]: value = "task-2452531" [ 1202.356068] env[62383]: _type = "Task" [ 1202.356068] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.363675] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452531, 'name': RelocateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.403155] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.403155] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.403155] env[62383]: DEBUG nova.network.neutron [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1202.764738] env[62383]: INFO nova.network.neutron [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Port 66cdcb95-1241-47bf-829d-bbcea0032500 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
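Most of the vCenter interaction in this stretch follows one pattern: invoke a vim method through the oslo.vmware session (Folder.CreateFolder, CreateVM_Task, ReconfigVM_Task, RelocateVM_Task), then wait while _poll_task reports progress, treating faults such as DuplicateName as recoverable. A hedged sketch of that pattern, assuming an already-authenticated oslo_vmware.api.VMwareAPISession named session and placeholder managed-object references; exact signatures may vary by oslo.vmware release, and this is not the driver's actual code.

    from oslo_vmware import exceptions as vexc


    def ensure_folder(session, parent_folder, name):
        """Create a child folder, treating a DuplicateName fault as 'already exists'."""
        try:
            return session.invoke_api(session.vim, 'CreateFolder', parent_folder, name=name)
        except vexc.DuplicateName:
            # Mirrors the "Folder already exists" handling visible in the log above.
            return None


    def run_task(session, task_ref):
        """Block until a vCenter task completes; oslo.vmware polls the task and logs
        the "Task: {...} progress is N%" lines seen throughout this section."""
        return session.wait_for_task(task_ref)

In this log, run_task-style waits are what sit behind task references such as task-2452528 through task-2452531 above.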
[ 1202.766041] env[62383]: DEBUG nova.network.neutron [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [{"id": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "address": "fa:16:3e:25:d6:e3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633f7cbf-4b", "ovs_interfaceid": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.871217] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452531, 'name': RelocateVM_Task} progress is 43%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.040174] env[62383]: DEBUG nova.compute.manager [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received event network-changed-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1203.040385] env[62383]: DEBUG nova.compute.manager [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing instance network info cache due to event network-changed-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1203.040582] env[62383]: DEBUG oslo_concurrency.lockutils [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] Acquiring lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.158676] env[62383]: DEBUG nova.network.neutron [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance_info_cache with network_info: [{"id": "fbc86f1d-2da8-4092-baac-7867624b1100", "address": "fa:16:3e:59:29:5c", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbc86f1d-2d", "ovs_interfaceid": "fbc86f1d-2da8-4092-baac-7867624b1100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.255519] env[62383]: DEBUG nova.objects.instance [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lazy-loading 'flavor' on Instance uuid 89033750-629f-4ddb-a309-56d50f798a8d {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.269665] env[62383]: DEBUG oslo_concurrency.lockutils [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.272566] env[62383]: DEBUG oslo_concurrency.lockutils [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] Acquired lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.272969] env[62383]: DEBUG nova.network.neutron [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Refreshing network info cache for port 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1203.367439] env[62383]: DEBUG oslo_vmware.api [None 
req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452531, 'name': RelocateVM_Task} progress is 56%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.578729] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "interface-a4e3b5a2-98c2-4376-bafd-49ccee64b262-66cdcb95-1241-47bf-829d-bbcea0032500" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.579501] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-a4e3b5a2-98c2-4376-bafd-49ccee64b262-66cdcb95-1241-47bf-829d-bbcea0032500" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.579858] env[62383]: DEBUG nova.objects.instance [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'flavor' on Instance uuid a4e3b5a2-98c2-4376-bafd-49ccee64b262 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1203.661730] env[62383]: DEBUG oslo_concurrency.lockutils [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-f7584d2c-5add-4764-9aed-22f7d1674854" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.662395] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1203.662727] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7f8f7cb-ff88-44c4-b080-d74cb35be9b9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.671708] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1203.671708] env[62383]: value = "task-2452532" [ 1203.671708] env[62383]: _type = "Task" [ 1203.671708] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.681275] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452532, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.761730] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bbfd68ec-65f6-441b-8b6e-6f7ad3345eaa tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.300s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.775374] env[62383]: DEBUG oslo_concurrency.lockutils [None req-310124e6-a149-4f6c-bad8-ca0f645f6262 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-66cdcb95-1241-47bf-829d-bbcea0032500" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.859s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.867011] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452531, 'name': RelocateVM_Task} progress is 69%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.009458] env[62383]: DEBUG nova.network.neutron [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updated VIF entry in instance network info cache for port 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1204.009458] env[62383]: DEBUG nova.network.neutron [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [{"id": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "address": "fa:16:3e:25:d6:e3", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap633f7cbf-4b", "ovs_interfaceid": "633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.091928] env[62383]: DEBUG oslo_concurrency.lockutils [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 
tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "89033750-629f-4ddb-a309-56d50f798a8d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1204.091928] env[62383]: DEBUG oslo_concurrency.lockutils [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.183039] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452532, 'name': PowerOffVM_Task, 'duration_secs': 0.252693} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.183347] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1204.184065] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1204.184297] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.184458] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1204.184644] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.184803] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1204.184984] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1204.185222] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1204.185387] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1204.185552] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1204.185716] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1204.185893] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1204.191703] env[62383]: DEBUG nova.objects.instance [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'pci_requests' on Instance uuid a4e3b5a2-98c2-4376-bafd-49ccee64b262 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1204.192797] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7dd4ea21-1c28-48ad-9fba-4f62b34c248f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.210566] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1204.210566] env[62383]: value = "task-2452533" [ 1204.210566] env[62383]: _type = "Task" [ 1204.210566] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.221591] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452533, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.368424] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452531, 'name': RelocateVM_Task} progress is 84%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.511534] env[62383]: DEBUG oslo_concurrency.lockutils [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] Releasing lock "refresh_cache-ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.511887] env[62383]: DEBUG nova.compute.manager [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Received event network-changed-7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1204.512083] env[62383]: DEBUG nova.compute.manager [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Refreshing instance network info cache due to event network-changed-7f9cebec-50e0-428c-a19c-f39af2719a65. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1204.512311] env[62383]: DEBUG oslo_concurrency.lockutils [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] Acquiring lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.512469] env[62383]: DEBUG oslo_concurrency.lockutils [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] Acquired lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.512699] env[62383]: DEBUG nova.network.neutron [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Refreshing network info cache for port 7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1204.595636] env[62383]: INFO nova.compute.manager [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Detaching volume 7056d27b-468a-48c9-b79c-0a478eafadfc [ 1204.632515] env[62383]: INFO nova.virt.block_device [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Attempting to driver detach volume 7056d27b-468a-48c9-b79c-0a478eafadfc from mountpoint /dev/sdb [ 1204.632767] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Volume detach. 
Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1204.632958] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496616', 'volume_id': '7056d27b-468a-48c9-b79c-0a478eafadfc', 'name': 'volume-7056d27b-468a-48c9-b79c-0a478eafadfc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '89033750-629f-4ddb-a309-56d50f798a8d', 'attached_at': '', 'detached_at': '', 'volume_id': '7056d27b-468a-48c9-b79c-0a478eafadfc', 'serial': '7056d27b-468a-48c9-b79c-0a478eafadfc'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1204.633949] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d939fb6-5af0-4d5d-b67c-380fda65e5d6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.659397] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959a3322-2e58-410a-a5fb-4542189b1d66 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.667279] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791bea82-e0c9-4dba-b7f0-82dd8a693d4a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.688744] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ac8193-9411-4e19-9b47-76fde23c3525 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.704119] env[62383]: DEBUG nova.objects.base [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1204.704335] env[62383]: DEBUG nova.network.neutron [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1204.706477] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] The volume has not been displaced from its original location: [datastore2] volume-7056d27b-468a-48c9-b79c-0a478eafadfc/volume-7056d27b-468a-48c9-b79c-0a478eafadfc.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1204.711744] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1204.712110] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de119f05-dc7c-41ae-875e-6538d3b7ecee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.737174] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452533, 'name': ReconfigVM_Task, 'duration_secs': 0.16106} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.739321] env[62383]: DEBUG oslo_vmware.api [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1204.739321] env[62383]: value = "task-2452534" [ 1204.739321] env[62383]: _type = "Task" [ 1204.739321] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.739499] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0046d7e7-1b10-4d31-9ba5-1e29b7b7cce9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.750719] env[62383]: DEBUG oslo_vmware.api [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452534, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.765517] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1204.765855] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1204.766048] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1204.766271] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1204.766557] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1204.766655] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1204.766868] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1204.767047] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1204.767222] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:505}} [ 1204.767390] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1204.767562] env[62383]: DEBUG nova.virt.hardware [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1204.770585] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-756378c7-c5a2-48ec-abe3-d08d0b52bc25 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.776996] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1204.776996] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b2b9a0-2cc9-8d60-7d8a-25a218c47fa8" [ 1204.776996] env[62383]: _type = "Task" [ 1204.776996] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.785197] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b2b9a0-2cc9-8d60-7d8a-25a218c47fa8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.809777] env[62383]: DEBUG nova.policy [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7825487398fc47b5aa690bed357e4448', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba559fb5da01474791c2408ca92bbff6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1204.867481] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452531, 'name': RelocateVM_Task} progress is 97%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.230288] env[62383]: DEBUG nova.network.neutron [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updated VIF entry in instance network info cache for port 7f9cebec-50e0-428c-a19c-f39af2719a65. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1205.230701] env[62383]: DEBUG nova.network.neutron [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updating instance_info_cache with network_info: [{"id": "7f9cebec-50e0-428c-a19c-f39af2719a65", "address": "fa:16:3e:57:77:97", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9cebec-50", "ovs_interfaceid": "7f9cebec-50e0-428c-a19c-f39af2719a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.239805] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.240046] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.252276] env[62383]: DEBUG oslo_vmware.api [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452534, 'name': ReconfigVM_Task, 'duration_secs': 0.346126} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.253053] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1205.257842] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dea67aaf-c0da-4739-b59a-68d714494c96 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.274810] env[62383]: DEBUG oslo_vmware.api [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1205.274810] env[62383]: value = "task-2452535" [ 1205.274810] env[62383]: _type = "Task" [ 1205.274810] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.288792] env[62383]: DEBUG oslo_vmware.api [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452535, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.292209] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b2b9a0-2cc9-8d60-7d8a-25a218c47fa8, 'name': SearchDatastore_Task, 'duration_secs': 0.008481} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.297745] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1205.298370] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e78b104d-cc51-49d8-9471-565345d5c694 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.317022] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1205.317022] env[62383]: value = "task-2452536" [ 1205.317022] env[62383]: _type = "Task" [ 1205.317022] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.325568] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452536, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.369176] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452531, 'name': RelocateVM_Task} progress is 97%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.733638] env[62383]: DEBUG oslo_concurrency.lockutils [req-2954a1b1-9add-4807-9070-0616b0ed6e12 req-8a32ab64-129a-4b5c-9d94-9e60487e06ad service nova] Releasing lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.742961] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.743228] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.743399] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.743555] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1205.744495] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5db3d3-7326-439e-933e-046f24a75eb3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.753200] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d25e921-2ed9-4962-980e-8cd0f21ed903 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.769202] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb9868f-d21a-4605-a271-769a1979acad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.779278] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f676efbe-f639-4a81-9f00-2d6b54aa20ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.788704] env[62383]: DEBUG oslo_vmware.api [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452535, 'name': ReconfigVM_Task, 'duration_secs': 0.302899} completed 
successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.815106] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496616', 'volume_id': '7056d27b-468a-48c9-b79c-0a478eafadfc', 'name': 'volume-7056d27b-468a-48c9-b79c-0a478eafadfc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '89033750-629f-4ddb-a309-56d50f798a8d', 'attached_at': '', 'detached_at': '', 'volume_id': '7056d27b-468a-48c9-b79c-0a478eafadfc', 'serial': '7056d27b-468a-48c9-b79c-0a478eafadfc'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1205.817517] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180303MB free_disk=145GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1205.817662] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.817852] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.828372] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452536, 'name': ReconfigVM_Task, 'duration_secs': 0.384642} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.828694] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1205.829483] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c25675-1352-4184-a4b7-128376a7b1aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.857457] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] f7584d2c-5add-4764-9aed-22f7d1674854/f7584d2c-5add-4764-9aed-22f7d1674854.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1205.857769] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-018500ae-140f-4f8e-9236-60a547d6aab5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.883235] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452531, 'name': RelocateVM_Task} progress is 98%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.884901] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1205.884901] env[62383]: value = "task-2452537" [ 1205.884901] env[62383]: _type = "Task" [ 1205.884901] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.205890] env[62383]: DEBUG nova.compute.manager [req-eaaea10f-0eba-46a5-acef-91223e6faeaf req-97277d80-2c0f-4bd5-8c43-03c59327fb53 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Received event network-vif-plugged-66cdcb95-1241-47bf-829d-bbcea0032500 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1206.206125] env[62383]: DEBUG oslo_concurrency.lockutils [req-eaaea10f-0eba-46a5-acef-91223e6faeaf req-97277d80-2c0f-4bd5-8c43-03c59327fb53 service nova] Acquiring lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.206335] env[62383]: DEBUG oslo_concurrency.lockutils [req-eaaea10f-0eba-46a5-acef-91223e6faeaf req-97277d80-2c0f-4bd5-8c43-03c59327fb53 service nova] Lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.206506] env[62383]: DEBUG oslo_concurrency.lockutils [req-eaaea10f-0eba-46a5-acef-91223e6faeaf req-97277d80-2c0f-4bd5-8c43-03c59327fb53 service nova] Lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.206669] env[62383]: DEBUG nova.compute.manager [req-eaaea10f-0eba-46a5-acef-91223e6faeaf req-97277d80-2c0f-4bd5-8c43-03c59327fb53 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] No waiting events found dispatching network-vif-plugged-66cdcb95-1241-47bf-829d-bbcea0032500 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1206.206833] env[62383]: WARNING nova.compute.manager [req-eaaea10f-0eba-46a5-acef-91223e6faeaf req-97277d80-2c0f-4bd5-8c43-03c59327fb53 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Received unexpected event network-vif-plugged-66cdcb95-1241-47bf-829d-bbcea0032500 for instance with vm_state active and task_state None. [ 1206.287889] env[62383]: DEBUG nova.network.neutron [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Successfully updated port: 66cdcb95-1241-47bf-829d-bbcea0032500 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1206.357966] env[62383]: DEBUG nova.objects.instance [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lazy-loading 'flavor' on Instance uuid 89033750-629f-4ddb-a309-56d50f798a8d {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.383061] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452531, 'name': RelocateVM_Task, 'duration_secs': 3.738898} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.383423] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Volume attach. Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1206.383543] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496615', 'volume_id': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'name': 'volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '991d6eb0-94e7-4d7c-bd85-3c8ef28daa49', 'attached_at': '', 'detached_at': '', 'volume_id': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'serial': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1206.384335] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b9cfcb-8fd0-462d-9a49-bd154fd8e54c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.394244] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452537, 'name': ReconfigVM_Task, 'duration_secs': 0.31737} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.404596] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Reconfigured VM instance instance-00000070 to attach disk [datastore2] f7584d2c-5add-4764-9aed-22f7d1674854/f7584d2c-5add-4764-9aed-22f7d1674854.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1206.405461] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ab9366-ec72-43f3-851b-534c82cb695d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.408104] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63f3ab0-54dc-4f5a-884c-cfa68d77ba90 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.435027] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4fff53-725a-42ff-8c64-bd67a4792b7a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.445410] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07/volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1206.445938] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b26267f-9520-4066-a6c6-f392dfd32a72 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.479532] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc82a1e4-19a1-4c09-b594-6d14ca853551 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.483668] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for the task: (returnval){ [ 1206.483668] env[62383]: value = "task-2452538" [ 1206.483668] env[62383]: _type = "Task" [ 1206.483668] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.501182] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab9675a-cc06-410e-8c29-d4696afe0ec7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.507789] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452538, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.512220] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1206.512541] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-011c4412-6107-4cff-8134-9583b607d30c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.519455] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1206.519455] env[62383]: value = "task-2452539" [ 1206.519455] env[62383]: _type = "Task" [ 1206.519455] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.527631] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452539, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.791616] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1206.791616] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.792083] env[62383]: DEBUG nova.network.neutron [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1206.847658] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 1b025655-acad-4b70-9e1a-489683cafb7e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.847907] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance c94e9a83-04de-4144-ab6e-d96dc7c39e6d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.847974] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 89033750-629f-4ddb-a309-56d50f798a8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.848127] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance ac4e173d-fec9-4a0f-b9b6-ad83a98989e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.848286] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a4e3b5a2-98c2-4376-bafd-49ccee64b262 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.848408] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance b8e512cd-5eb9-423c-9447-833e34909bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.848526] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 12843fba-0240-44fb-9687-d34a6333011b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.848640] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance f7584d2c-5add-4764-9aed-22f7d1674854 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.848751] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.848952] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1206.849106] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1206.978417] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb1ab8b-2a27-483d-aedb-cbd648c89ca3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.989790] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7871c28b-72cc-49b9-aa52-bd5b24d12cf6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.995414] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452538, 'name': ReconfigVM_Task, 'duration_secs': 0.34224} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.995992] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Reconfigured VM instance instance-00000077 to attach disk [datastore2] volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07/volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1207.025935] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-772f145e-f67b-43bc-b2e7-8c110b86f96a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.040449] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53bea8b-be2d-4d56-a0ad-185365c826d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.053901] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b5a110-659c-4a34-8775-99857824b434 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.057877] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for the task: (returnval){ [ 1207.057877] env[62383]: value = "task-2452540" [ 1207.057877] env[62383]: _type = "Task" [ 1207.057877] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.058158] env[62383]: DEBUG oslo_vmware.api [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452539, 'name': PowerOnVM_Task, 'duration_secs': 0.390516} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.058481] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1207.075087] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.081286] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452540, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.327334] env[62383]: WARNING nova.network.neutron [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] ce80ab32-a193-40db-be36-b8085e20a4c5 already exists in list: networks containing: ['ce80ab32-a193-40db-be36-b8085e20a4c5']. ignoring it [ 1207.365773] env[62383]: DEBUG oslo_concurrency.lockutils [None req-684910b3-94ed-41d9-90d8-042551e85a78 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.274s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1207.572442] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452540, 'name': ReconfigVM_Task, 'duration_secs': 0.382724} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.572763] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496615', 'volume_id': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'name': 'volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '991d6eb0-94e7-4d7c-bd85-3c8ef28daa49', 'attached_at': '', 'detached_at': '', 'volume_id': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'serial': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1207.575544] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bad345f-02c7-4f1f-8714-0e48a0684e19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.576830] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1207.580818] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for the task: (returnval){ [ 1207.580818] env[62383]: value = "task-2452541" [ 1207.580818] env[62383]: _type = "Task" [ 1207.580818] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.590733] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452541, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.598370] env[62383]: DEBUG nova.network.neutron [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updating instance_info_cache with network_info: [{"id": "7f9cebec-50e0-428c-a19c-f39af2719a65", "address": "fa:16:3e:57:77:97", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9cebec-50", "ovs_interfaceid": "7f9cebec-50e0-428c-a19c-f39af2719a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "66cdcb95-1241-47bf-829d-bbcea0032500", "address": "fa:16:3e:a9:d7:47", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cdcb95-12", "ovs_interfaceid": "66cdcb95-1241-47bf-829d-bbcea0032500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.075156] env[62383]: INFO nova.compute.manager [None req-9ae0f7b4-9fe2-4e1a-94d1-28b15d3a3d67 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance to original state: 'active' [ 1208.081046] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1208.081238] env[62383]: 
DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.263s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.090693] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452541, 'name': Rename_Task, 'duration_secs': 0.140426} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.090962] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1208.091204] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f47b8d6-5a76-42ed-b5ed-dabc50de5b37 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.098067] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for the task: (returnval){ [ 1208.098067] env[62383]: value = "task-2452542" [ 1208.098067] env[62383]: _type = "Task" [ 1208.098067] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.101373] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1208.101970] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.102144] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.103129] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f4ca94-2b4b-455e-a840-61f0b202b7ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.113358] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452542, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.125499] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1208.125765] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1208.125928] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1208.126131] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1208.126287] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1208.126442] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1208.126651] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1208.126813] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1208.126978] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Got 1 possible topologies 
{{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1208.127158] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1208.127332] env[62383]: DEBUG nova.virt.hardware [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1208.133564] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Reconfiguring VM to attach interface {{(pid=62383) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1208.134263] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8a2e103-35a7-47db-8ef2-9be5e3e6192e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.155623] env[62383]: DEBUG oslo_vmware.api [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1208.155623] env[62383]: value = "task-2452543" [ 1208.155623] env[62383]: _type = "Task" [ 1208.155623] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.166745] env[62383]: DEBUG oslo_vmware.api [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452543, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.232410] env[62383]: DEBUG nova.compute.manager [req-3c31b2f3-d748-45a9-9f6b-039ce29609d3 req-5ff2b58a-a4d1-4cbe-88b4-ef85cf174feb service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Received event network-changed-66cdcb95-1241-47bf-829d-bbcea0032500 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1208.232608] env[62383]: DEBUG nova.compute.manager [req-3c31b2f3-d748-45a9-9f6b-039ce29609d3 req-5ff2b58a-a4d1-4cbe-88b4-ef85cf174feb service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Refreshing instance network info cache due to event network-changed-66cdcb95-1241-47bf-829d-bbcea0032500. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1208.232817] env[62383]: DEBUG oslo_concurrency.lockutils [req-3c31b2f3-d748-45a9-9f6b-039ce29609d3 req-5ff2b58a-a4d1-4cbe-88b4-ef85cf174feb service nova] Acquiring lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1208.232961] env[62383]: DEBUG oslo_concurrency.lockutils [req-3c31b2f3-d748-45a9-9f6b-039ce29609d3 req-5ff2b58a-a4d1-4cbe-88b4-ef85cf174feb service nova] Acquired lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.233139] env[62383]: DEBUG nova.network.neutron [req-3c31b2f3-d748-45a9-9f6b-039ce29609d3 req-5ff2b58a-a4d1-4cbe-88b4-ef85cf174feb service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Refreshing network info cache for port 66cdcb95-1241-47bf-829d-bbcea0032500 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1208.530338] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "89033750-629f-4ddb-a309-56d50f798a8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.530642] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.530865] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "89033750-629f-4ddb-a309-56d50f798a8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.531074] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.531262] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1208.534287] env[62383]: INFO nova.compute.manager [None req-e479b398-ee5d-48d0-9a6c-90b373887917 
tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Terminating instance [ 1208.609697] env[62383]: DEBUG oslo_vmware.api [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452542, 'name': PowerOnVM_Task, 'duration_secs': 0.47233} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.610140] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1208.610358] env[62383]: INFO nova.compute.manager [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Took 8.12 seconds to spawn the instance on the hypervisor. [ 1208.610547] env[62383]: DEBUG nova.compute.manager [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1208.611430] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bf7d88-e472-4e29-91bf-0e3f24c26561 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.666446] env[62383]: DEBUG oslo_vmware.api [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452543, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.928142] env[62383]: DEBUG nova.network.neutron [req-3c31b2f3-d748-45a9-9f6b-039ce29609d3 req-5ff2b58a-a4d1-4cbe-88b4-ef85cf174feb service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updated VIF entry in instance network info cache for port 66cdcb95-1241-47bf-829d-bbcea0032500. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1208.928588] env[62383]: DEBUG nova.network.neutron [req-3c31b2f3-d748-45a9-9f6b-039ce29609d3 req-5ff2b58a-a4d1-4cbe-88b4-ef85cf174feb service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updating instance_info_cache with network_info: [{"id": "7f9cebec-50e0-428c-a19c-f39af2719a65", "address": "fa:16:3e:57:77:97", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9cebec-50", "ovs_interfaceid": "7f9cebec-50e0-428c-a19c-f39af2719a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "66cdcb95-1241-47bf-829d-bbcea0032500", "address": "fa:16:3e:a9:d7:47", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66cdcb95-12", "ovs_interfaceid": "66cdcb95-1241-47bf-829d-bbcea0032500", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.039274] env[62383]: DEBUG nova.compute.manager [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1209.039550] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1209.040922] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c47f8e-7172-4cda-b82f-419796727018 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.050907] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1209.051211] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce7bc6e0-200c-4d67-a8f2-f495019b4e5f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.061155] env[62383]: DEBUG oslo_vmware.api [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1209.061155] env[62383]: value = "task-2452544" [ 1209.061155] env[62383]: _type = "Task" [ 1209.061155] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.071103] env[62383]: DEBUG oslo_vmware.api [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452544, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.076864] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.077217] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.135588] env[62383]: INFO nova.compute.manager [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Took 14.65 seconds to build instance. [ 1209.166648] env[62383]: DEBUG oslo_vmware.api [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452543, 'name': ReconfigVM_Task, 'duration_secs': 0.572048} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.167174] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.167414] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Reconfigured VM to attach interface {{(pid=62383) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1209.431391] env[62383]: DEBUG oslo_concurrency.lockutils [req-3c31b2f3-d748-45a9-9f6b-039ce29609d3 req-5ff2b58a-a4d1-4cbe-88b4-ef85cf174feb service nova] Releasing lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1209.571924] env[62383]: DEBUG oslo_vmware.api [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452544, 'name': PowerOffVM_Task, 'duration_secs': 0.209033} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.572178] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1209.572362] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1209.572622] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c01ba62-db0e-4c50-8d9f-a069e4692dac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.583847] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.584137] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1209.637991] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7ef37e27-f2b1-4d8f-ad2e-3be55acb0299 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.162s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.662622] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1209.662867] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1209.663070] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleting the datastore file [datastore2] 89033750-629f-4ddb-a309-56d50f798a8d {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1209.663350] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a2e2c040-2061-455f-9993-164adb5df2bd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.670675] env[62383]: DEBUG oslo_vmware.api [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for the task: (returnval){ [ 1209.670675] env[62383]: value = "task-2452546" [ 1209.670675] env[62383]: _type = "Task" [ 1209.670675] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.671817] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c30b9959-1e75-46d7-a779-ab839dbdf08a tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-a4e3b5a2-98c2-4376-bafd-49ccee64b262-66cdcb95-1241-47bf-829d-bbcea0032500" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.092s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.682588] env[62383]: DEBUG oslo_vmware.api [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.087436] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Didn't find any instances for network info cache update. 
{{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1210.087436] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.087436] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.087436] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.184108] env[62383]: DEBUG oslo_vmware.api [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Task: {'id': task-2452546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142713} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.184108] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1210.184108] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1210.184108] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1210.184108] env[62383]: INFO nova.compute.manager [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1210.184108] env[62383]: DEBUG oslo.service.loopingcall [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1210.184108] env[62383]: DEBUG nova.compute.manager [-] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1210.184108] env[62383]: DEBUG nova.network.neutron [-] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1210.240368] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.240368] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1210.268520] env[62383]: DEBUG nova.compute.manager [req-4f7b37e4-33af-47d7-9977-bda689ade17d req-e6a50473-4090-4e12-92c7-ff798d010143 service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Received event network-changed-58a9319d-b343-4caf-904d-91af9410d121 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1210.268520] env[62383]: DEBUG nova.compute.manager [req-4f7b37e4-33af-47d7-9977-bda689ade17d req-e6a50473-4090-4e12-92c7-ff798d010143 service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Refreshing instance network info cache due to event network-changed-58a9319d-b343-4caf-904d-91af9410d121. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1210.269109] env[62383]: DEBUG oslo_concurrency.lockutils [req-4f7b37e4-33af-47d7-9977-bda689ade17d req-e6a50473-4090-4e12-92c7-ff798d010143 service nova] Acquiring lock "refresh_cache-991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.269400] env[62383]: DEBUG oslo_concurrency.lockutils [req-4f7b37e4-33af-47d7-9977-bda689ade17d req-e6a50473-4090-4e12-92c7-ff798d010143 service nova] Acquired lock "refresh_cache-991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.269679] env[62383]: DEBUG nova.network.neutron [req-4f7b37e4-33af-47d7-9977-bda689ade17d req-e6a50473-4090-4e12-92c7-ff798d010143 service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Refreshing network info cache for port 58a9319d-b343-4caf-904d-91af9410d121 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1210.651751] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "f7584d2c-5add-4764-9aed-22f7d1674854" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.651751] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "f7584d2c-5add-4764-9aed-22f7d1674854" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.651751] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "f7584d2c-5add-4764-9aed-22f7d1674854-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.651751] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "f7584d2c-5add-4764-9aed-22f7d1674854-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.651751] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "f7584d2c-5add-4764-9aed-22f7d1674854-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.653596] env[62383]: INFO nova.compute.manager [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Terminating instance [ 1210.675279] env[62383]: DEBUG nova.compute.manager [req-4e7c8052-c551-4ae7-85db-4db136c39618 req-be97bea4-70c7-40db-94ca-d6a6a437bb7a service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Received event network-vif-deleted-158b7402-2cbc-46b2-a789-ada2ac1b29cd {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1210.675279] env[62383]: INFO nova.compute.manager [req-4e7c8052-c551-4ae7-85db-4db136c39618 req-be97bea4-70c7-40db-94ca-d6a6a437bb7a service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Neutron deleted interface 158b7402-2cbc-46b2-a789-ada2ac1b29cd; detaching it from the instance and deleting it from the info cache [ 1210.675279] env[62383]: DEBUG nova.network.neutron [req-4e7c8052-c551-4ae7-85db-4db136c39618 req-be97bea4-70c7-40db-94ca-d6a6a437bb7a service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.980379] env[62383]: DEBUG nova.network.neutron [req-4f7b37e4-33af-47d7-9977-bda689ade17d req-e6a50473-4090-4e12-92c7-ff798d010143 service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Updated VIF entry in instance network info cache for port 58a9319d-b343-4caf-904d-91af9410d121. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1210.980750] env[62383]: DEBUG nova.network.neutron [req-4f7b37e4-33af-47d7-9977-bda689ade17d req-e6a50473-4090-4e12-92c7-ff798d010143 service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Updating instance_info_cache with network_info: [{"id": "58a9319d-b343-4caf-904d-91af9410d121", "address": "fa:16:3e:69:75:d7", "network": {"id": "e4b16357-c5f6-4500-a798-3f730c27a39a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-860166877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.253", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d895c57c68f2427fb5da3b4c2866b0c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4345ef6-a7c8-4c1c-badf-a0d4f578b61c", "external-id": "nsx-vlan-transportzone-677", "segmentation_id": 677, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap58a9319d-b3", "ovs_interfaceid": "58a9319d-b343-4caf-904d-91af9410d121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.158853] env[62383]: DEBUG nova.network.neutron [-] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.160370] env[62383]: DEBUG nova.compute.manager [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Start destroying the instance on the hypervisor. 
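
Annotation: the instance_info_cache entry logged just above is a plain list of VIF dictionaries. As a minimal illustration of how that cached structure can be read (the keys are copied from the log record itself; Nova wraps this data in nova.network.model objects rather than raw dicts), consider:

# Illustrative only: walk a network_info list shaped like the cache entry above.
network_info = [{
    "id": "58a9319d-b343-4caf-904d-91af9410d121",
    "address": "fa:16:3e:69:75:d7",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.3",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.253", "type": "floating"}],
            }],
        }],
    },
    "type": "ovs",
    "devname": "tap58a9319d-b3",
}]

for vif in network_info:
    print("port", vif["id"], "mac", vif["address"], "dev", vif["devname"])
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            print("  fixed", ip["address"], "in", subnet["cidr"])
            for fip in ip.get("floating_ips", []):
                print("  floating", fip["address"])
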
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1211.160589] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1211.161642] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad3c64d-3f46-44b2-a1aa-69a3d4f9e4bc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.169929] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1211.170702] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c6d4a86-6fc2-41a9-831e-2d550bc616ba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.177992] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e608b2a-8de4-4286-bb63-0b5e14b28730 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.181712] env[62383]: DEBUG oslo_vmware.api [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1211.181712] env[62383]: value = "task-2452547" [ 1211.181712] env[62383]: _type = "Task" [ 1211.181712] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.189570] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a3b1f7-44c0-4503-a3b1-eb0f4c5be13d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.203521] env[62383]: DEBUG oslo_vmware.api [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.225887] env[62383]: DEBUG nova.compute.manager [req-4e7c8052-c551-4ae7-85db-4db136c39618 req-be97bea4-70c7-40db-94ca-d6a6a437bb7a service nova] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Detach interface failed, port_id=158b7402-2cbc-46b2-a789-ada2ac1b29cd, reason: Instance 89033750-629f-4ddb-a309-56d50f798a8d could not be found. 
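
Annotation: the "Waiting for the task … progress is N%" lines above come from oslo.vmware's task polling loop (wait_for_task/_poll_task). The sketch below is a rough, self-contained imitation of that pattern; get_task_state is a stand-in stub, not the real oslo.vmware API, which queries the vCenter task object through the session's property collector.

import time

# Stand-in for querying a vCenter task; returns a fake state machine.
def get_task_state(task_id):
    get_task_state.calls += 1
    if get_task_state.calls < 3:
        return {"state": "running", "progress": 33 * get_task_state.calls}
    return {"state": "success", "progress": 100}
get_task_state.calls = 0

def wait_for_task(task_id, interval=0.5):
    """Poll a task until it reaches a terminal state, like the log lines show."""
    while True:
        info = get_task_state(task_id)
        print("Task %s progress is %d%%" % (task_id, info["progress"]))
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task %s failed" % task_id)
        time.sleep(interval)

wait_for_task("task-2452547")
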
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1211.241113] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.314831] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "interface-a4e3b5a2-98c2-4376-bafd-49ccee64b262-66cdcb95-1241-47bf-829d-bbcea0032500" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.315144] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-a4e3b5a2-98c2-4376-bafd-49ccee64b262-66cdcb95-1241-47bf-829d-bbcea0032500" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1211.483772] env[62383]: DEBUG oslo_concurrency.lockutils [req-4f7b37e4-33af-47d7-9977-bda689ade17d req-e6a50473-4090-4e12-92c7-ff798d010143 service nova] Releasing lock "refresh_cache-991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1211.661790] env[62383]: INFO nova.compute.manager [-] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Took 1.48 seconds to deallocate network for instance. [ 1211.691264] env[62383]: DEBUG oslo_vmware.api [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452547, 'name': PowerOffVM_Task, 'duration_secs': 0.265723} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.691534] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1211.691698] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1211.691976] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-635dc379-25d8-4760-83dc-1e47ad6e32ff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.781434] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1211.781721] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1211.781921] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleting the datastore file [datastore2] f7584d2c-5add-4764-9aed-22f7d1674854 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1211.782193] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a2329ddf-231d-4a9a-ba2d-12e5d9f3c4e2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.788731] env[62383]: DEBUG oslo_vmware.api [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1211.788731] env[62383]: value = "task-2452549" [ 1211.788731] env[62383]: _type = "Task" [ 1211.788731] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.796621] env[62383]: DEBUG oslo_vmware.api [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452549, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.819483] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1211.819805] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.820692] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecf406d-b789-4fa0-a2c5-a39fdcff29fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.838444] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3388656b-833c-4342-97bf-74c39ed120d0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.864622] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Reconfiguring VM to detach interface {{(pid=62383) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1211.864906] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d96f010d-03c5-486e-9921-9a1ab6cd7337 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.882865] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1211.882865] env[62383]: value = "task-2452550" [ 1211.882865] env[62383]: _type = "Task" [ 1211.882865] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.890316] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.169114] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.169114] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.169114] env[62383]: DEBUG nova.objects.instance [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lazy-loading 'resources' on Instance uuid 89033750-629f-4ddb-a309-56d50f798a8d {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.298619] env[62383]: DEBUG oslo_vmware.api [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452549, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147128} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.298879] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1212.299079] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1212.299268] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1212.299440] env[62383]: INFO nova.compute.manager [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1212.299681] env[62383]: DEBUG oslo.service.loopingcall [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1212.299877] env[62383]: DEBUG nova.compute.manager [-] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1212.299972] env[62383]: DEBUG nova.network.neutron [-] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1212.392777] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.752356] env[62383]: DEBUG nova.compute.manager [req-9c43e1ab-7cdd-4a7a-839b-4bcdf5375028 req-f179af77-6eb2-433f-9982-836b1db155f8 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Received event network-vif-deleted-fbc86f1d-2da8-4092-baac-7867624b1100 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1212.752621] env[62383]: INFO nova.compute.manager [req-9c43e1ab-7cdd-4a7a-839b-4bcdf5375028 req-f179af77-6eb2-433f-9982-836b1db155f8 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Neutron deleted interface fbc86f1d-2da8-4092-baac-7867624b1100; detaching it from the instance and deleting it from the info cache [ 1212.752814] env[62383]: DEBUG nova.network.neutron [req-9c43e1ab-7cdd-4a7a-839b-4bcdf5375028 req-f179af77-6eb2-433f-9982-836b1db155f8 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.820787] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fec7d61-444d-4d10-8a4a-8c66e265fba6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.829839] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc371ee-6d64-4fee-bd61-1d743399fea3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.861934] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d54fbb1-2a85-421a-a2d9-65c927c83ba8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.869330] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ac1bbd-50ba-4a29-acd1-65ea0e90f69e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.884031] env[62383]: DEBUG nova.compute.provider_tree [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1212.893089] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e 
tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.235609] env[62383]: DEBUG nova.network.neutron [-] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.255518] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e161a76-8732-4c28-8d40-116baa045c4e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.266077] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc01d107-17ca-44b7-be4d-dad0be4a40c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.295033] env[62383]: DEBUG nova.compute.manager [req-9c43e1ab-7cdd-4a7a-839b-4bcdf5375028 req-f179af77-6eb2-433f-9982-836b1db155f8 service nova] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Detach interface failed, port_id=fbc86f1d-2da8-4092-baac-7867624b1100, reason: Instance f7584d2c-5add-4764-9aed-22f7d1674854 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1213.390186] env[62383]: DEBUG nova.scheduler.client.report [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1213.397030] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.739131] env[62383]: INFO nova.compute.manager [-] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Took 1.44 seconds to deallocate network for instance. [ 1213.893711] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 14%. 
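
Annotation: the inventory dictionary reported to Placement above maps directly onto the standard capacity formula, capacity = (total - reserved) * allocation_ratio. Reproducing that arithmetic on the logged values (a small check, not Nova code):

# Values copied from the scheduler report log line above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, "schedulable capacity:", capacity)
# VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 400.0
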
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.898521] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.730s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.919340] env[62383]: INFO nova.scheduler.client.report [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Deleted allocations for instance 89033750-629f-4ddb-a309-56d50f798a8d [ 1214.245638] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.245904] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.246142] env[62383]: DEBUG nova.objects.instance [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'resources' on Instance uuid f7584d2c-5add-4764-9aed-22f7d1674854 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1214.394191] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 14%. 
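
Annotation: the Acquiring/acquired/"released" lock lines, with their waited/held timings, are emitted by oslo.concurrency's lockutils wrappers. A minimal sketch of the same two primitives seen in this log (decorator and context-manager forms), assuming oslo.concurrency is installed as in this environment:

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_usage():
    # Critical section: one thread per process holds "compute_resources",
    # matching the waited/held messages logged above.
    pass

def terminate(instance_uuid):
    # Context-manager form, mirroring the per-instance locks in the log.
    with lockutils.lock(instance_uuid):
        pass

update_usage()
terminate("f7584d2c-5add-4764-9aed-22f7d1674854")
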
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.427849] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e479b398-ee5d-48d0-9a6c-90b373887917 tempest-AttachVolumeNegativeTest-1609342862 tempest-AttachVolumeNegativeTest-1609342862-project-member] Lock "89033750-629f-4ddb-a309-56d50f798a8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.897s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.863792] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18ee98c-3d59-45b5-9407-54d9937cddaa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.871383] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0480fbb0-6ec0-4bc9-bffa-29d331509ee0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.904898] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c8a6de-1784-44f7-8604-2b5b5884f813 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.912197] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.915471] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ade061-fce9-41f6-8ef5-c23756f2473b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.928620] env[62383]: DEBUG nova.compute.provider_tree [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.284460] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "b8e512cd-5eb9-423c-9447-833e34909bc3" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.284799] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.285062] env[62383]: INFO nova.compute.manager [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: 
b8e512cd-5eb9-423c-9447-833e34909bc3] Shelving [ 1215.411069] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.432518] env[62383]: DEBUG nova.scheduler.client.report [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1215.911488] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.936821] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.691s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.960318] env[62383]: INFO nova.scheduler.client.report [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleted allocations for instance f7584d2c-5add-4764-9aed-22f7d1674854 [ 1216.295160] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1216.295160] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25956883-cfbd-4f45-91de-8b6d4f3a8243 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.303126] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1216.303126] env[62383]: value = "task-2452552" [ 1216.303126] env[62383]: _type = "Task" [ 1216.303126] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.311284] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452552, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.412368] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.467834] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ee6f4304-e94c-4557-9ec3-c158be61854b tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "f7584d2c-5add-4764-9aed-22f7d1674854" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.818s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.813140] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452552, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.912437] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.314184] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452552, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.375833] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "12843fba-0240-44fb-9687-d34a6333011b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.376106] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "12843fba-0240-44fb-9687-d34a6333011b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.376346] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "12843fba-0240-44fb-9687-d34a6333011b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.376628] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "12843fba-0240-44fb-9687-d34a6333011b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.376811] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "12843fba-0240-44fb-9687-d34a6333011b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.378990] env[62383]: INFO nova.compute.manager [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Terminating instance [ 1217.412808] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.716426] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "02e2865c-ff68-44ac-abc6-839e399bbe7c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.716676] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "02e2865c-ff68-44ac-abc6-839e399bbe7c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1217.814222] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452552, 'name': PowerOffVM_Task, 'duration_secs': 1.213615} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.814498] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1217.815302] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99c5daf-71d4-4f5b-98a6-584c680371d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.833072] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b195a7-57eb-4d8c-8cee-dbed5acacc75 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.882864] env[62383]: DEBUG nova.compute.manager [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1217.883080] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1217.883349] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d954cd7-a9b6-434a-ac46-451e9abeaf58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.891022] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1217.891022] env[62383]: value = "task-2452553" [ 1217.891022] env[62383]: _type = "Task" [ 1217.891022] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.899146] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452553, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.912200] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.219165] env[62383]: DEBUG nova.compute.manager [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1218.343184] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Creating Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1218.343632] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-88d08412-4da1-4090-a049-d36bcb203fd3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.350499] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1218.350499] env[62383]: value = "task-2452555" [ 1218.350499] env[62383]: _type = "Task" [ 1218.350499] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.358169] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452555, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.400476] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452553, 'name': PowerOffVM_Task, 'duration_secs': 0.221948} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.400680] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1218.400897] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1218.401116] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496603', 'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac', 'name': 'volume-24d05613-a175-4448-bd5a-122c9a2e08ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '12843fba-0240-44fb-9687-d34a6333011b', 'attached_at': '2025-02-11T15:35:04.000000', 'detached_at': '', 'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac', 'serial': '24d05613-a175-4448-bd5a-122c9a2e08ac'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1218.401852] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7182cee9-801d-4999-b814-c1066e3fb5ab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.422954] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570abd90-1d48-40d1-8928-77370274dd43 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.428365] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task} progress is 99%. 
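
Annotation: the _detach_volume_vmdk record above carries the Cinder connection_info for the volume being detached. A small sketch that pulls the identifying fields out of such a dict (keys copied from the log entry; this is not the Nova detach code itself):

# connection_info as logged above, abridged to the fields used below.
connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-496603',
        'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac',
        'name': 'volume-24d05613-a175-4448-bd5a-122c9a2e08ac',
        'access_mode': 'rw',
        'encrypted': False,
    },
    'serial': '24d05613-a175-4448-bd5a-122c9a2e08ac',
}

assert connection_info['driver_volume_type'] == 'vmdk'
data = connection_info['data']
print("volume backing reference:", data['volume'])
print("cinder volume id:", data['volume_id'])
print("read-only attach:", data['access_mode'] != 'rw')
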
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.432527] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf3049e-c59d-4afa-94ed-69e5b84b62d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.449736] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fb1c41-9f93-45ce-8065-8d76202ed463 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.464391] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] The volume has not been displaced from its original location: [datastore2] volume-24d05613-a175-4448-bd5a-122c9a2e08ac/volume-24d05613-a175-4448-bd5a-122c9a2e08ac.vmdk. No consolidation needed. {{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1218.469651] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1218.469912] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-906c8b76-60de-4544-a5bf-6fcc9dffd6a3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.487903] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1218.487903] env[62383]: value = "task-2452556" [ 1218.487903] env[62383]: _type = "Task" [ 1218.487903] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.496437] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452556, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.743343] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.743650] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.745333] env[62383]: INFO nova.compute.claims [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1218.860900] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452555, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.915697] env[62383]: DEBUG oslo_vmware.api [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452550, 'name': ReconfigVM_Task, 'duration_secs': 6.870535} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.915935] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.916182] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Reconfigured VM to detach interface {{(pid=62383) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1218.998380] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452556, 'name': ReconfigVM_Task, 'duration_secs': 0.163634} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.998681] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1219.005592] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4b0ac19-bbdd-4ef9-8c02-edc53519be15 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.022309] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1219.022309] env[62383]: value = "task-2452557" [ 1219.022309] env[62383]: _type = "Task" [ 1219.022309] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.030181] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452557, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.361293] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452555, 'name': CreateSnapshot_Task, 'duration_secs': 0.767942} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.361573] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Created Snapshot of the VM instance {{(pid=62383) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1219.362341] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1c95e6-7a08-48a5-a35c-13393f9344ca {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.532104] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452557, 'name': ReconfigVM_Task, 'duration_secs': 0.15973} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.532465] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496603', 'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac', 'name': 'volume-24d05613-a175-4448-bd5a-122c9a2e08ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '12843fba-0240-44fb-9687-d34a6333011b', 'attached_at': '2025-02-11T15:35:04.000000', 'detached_at': '', 'volume_id': '24d05613-a175-4448-bd5a-122c9a2e08ac', 'serial': '24d05613-a175-4448-bd5a-122c9a2e08ac'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1219.532762] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1219.533508] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d923712-5c2b-4896-bcfa-de0a6e32093e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.539753] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1219.539995] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b5bdcdd-e458-4eb8-b448-585af1568d26 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.600823] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1219.601064] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1219.601254] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleting the datastore file [datastore2] 12843fba-0240-44fb-9687-d34a6333011b {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1219.601505] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6beef89-edd9-4645-b33a-d0dddde2fe7c {{(pid=62383) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.609336] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1219.609336] env[62383]: value = "task-2452559" [ 1219.609336] env[62383]: _type = "Task" [ 1219.609336] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.616887] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452559, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.854635] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d872edc6-ddd4-4c33-9ed6-e5b92929bc92 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.861668] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e6fac0-486b-4dad-9830-c25ff35a9807 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.897442] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Creating linked-clone VM from snapshot {{(pid=62383) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1219.898233] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-933aa4d6-5bb3-4bd7-8364-a08a96f917d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.901464] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741719ec-c689-4b8c-b1f0-49771e9478d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.909681] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669be677-59f7-4239-a854-4a86adf1a610 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.913286] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1219.913286] env[62383]: value = "task-2452560" [ 1219.913286] env[62383]: _type = "Task" [ 1219.913286] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.923682] env[62383]: DEBUG nova.compute.provider_tree [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.929487] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452560, 'name': CloneVM_Task} progress is 10%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.120858] env[62383]: DEBUG oslo_vmware.api [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452559, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077757} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.121205] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1220.121489] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1220.121804] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1220.122131] env[62383]: INFO nova.compute.manager [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Took 2.24 seconds to destroy the instance on the hypervisor. [ 1220.122573] env[62383]: DEBUG oslo.service.loopingcall [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1220.122785] env[62383]: DEBUG nova.compute.manager [-] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1220.122900] env[62383]: DEBUG nova.network.neutron [-] [instance: 12843fba-0240-44fb-9687-d34a6333011b] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1220.242903] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1220.243118] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquired lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.243317] env[62383]: DEBUG nova.network.neutron [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1220.424741] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452560, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.426738] env[62383]: DEBUG nova.scheduler.client.report [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1220.600832] env[62383]: DEBUG nova.compute.manager [req-dad2f262-b1c4-4f48-9efb-3c5ba3ddc2e3 req-a46d2be2-2bd2-4964-92c9-e5f9812358c6 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Received event network-vif-deleted-6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1220.600832] env[62383]: INFO nova.compute.manager [req-dad2f262-b1c4-4f48-9efb-3c5ba3ddc2e3 req-a46d2be2-2bd2-4964-92c9-e5f9812358c6 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Neutron deleted interface 6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0; detaching it from the instance and deleting it from the info cache [ 1220.600832] env[62383]: DEBUG nova.network.neutron [req-dad2f262-b1c4-4f48-9efb-3c5ba3ddc2e3 req-a46d2be2-2bd2-4964-92c9-e5f9812358c6 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.795708] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.795963] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.796184] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.796361] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.796547] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.808019] env[62383]: INFO nova.compute.manager [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Terminating instance [ 1220.924398] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452560, 'name': CloneVM_Task} progress is 95%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.931648] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.188s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.932183] env[62383]: DEBUG nova.compute.manager [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1220.971398] env[62383]: INFO nova.network.neutron [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Port 66cdcb95-1241-47bf-829d-bbcea0032500 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1220.971805] env[62383]: DEBUG nova.network.neutron [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updating instance_info_cache with network_info: [{"id": "7f9cebec-50e0-428c-a19c-f39af2719a65", "address": "fa:16:3e:57:77:97", "network": {"id": "ce80ab32-a193-40db-be36-b8085e20a4c5", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1934153343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba559fb5da01474791c2408ca92bbff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f9cebec-50", "ovs_interfaceid": "7f9cebec-50e0-428c-a19c-f39af2719a65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.079524] env[62383]: DEBUG nova.network.neutron [-] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.103229] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ba34ebcc-94a8-4a37-8329-4f341d205277 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.112986] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a16d33-09cf-456d-9094-5b5967cb42e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.142577] env[62383]: DEBUG nova.compute.manager [req-dad2f262-b1c4-4f48-9efb-3c5ba3ddc2e3 req-a46d2be2-2bd2-4964-92c9-e5f9812358c6 service nova] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Detach interface failed, port_id=6a7f4f55-9cd8-4155-a3f8-f1d10a927bf0, reason: Instance 12843fba-0240-44fb-9687-d34a6333011b could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1221.311810] env[62383]: DEBUG nova.compute.manager [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1221.312081] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1221.312946] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60db361-f37b-4488-bb95-40f40c6b676b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.320492] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1221.320712] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2851a970-17ee-4a55-8d73-85cdaa1d41d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.327024] env[62383]: DEBUG oslo_vmware.api [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1221.327024] env[62383]: value = "task-2452561" [ 1221.327024] env[62383]: _type = "Task" [ 1221.327024] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.335141] env[62383]: DEBUG oslo_vmware.api [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452561, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.425237] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452560, 'name': CloneVM_Task, 'duration_secs': 1.131397} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.425575] env[62383]: INFO nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Created linked-clone VM from snapshot [ 1221.426366] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1804a9-22f3-44d6-a67a-329ce705e177 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.433498] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Uploading image c29d8c40-d10a-482c-893e-d9a6953f83ec {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1221.436927] env[62383]: DEBUG nova.compute.utils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1221.438288] env[62383]: DEBUG nova.compute.manager [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1221.438400] env[62383]: DEBUG nova.network.neutron [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1221.460206] env[62383]: DEBUG oslo_vmware.rw_handles [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1221.460206] env[62383]: value = "vm-496620" [ 1221.460206] env[62383]: _type = "VirtualMachine" [ 1221.460206] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1221.461136] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-126bdab0-5639-4489-85a2-849bc66bf40f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.468871] env[62383]: DEBUG oslo_vmware.rw_handles [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lease: (returnval){ [ 1221.468871] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4b6f6-168c-0780-a764-c998f66950e9" [ 1221.468871] env[62383]: _type = "HttpNfcLease" [ 1221.468871] env[62383]: } obtained for exporting VM: (result){ [ 1221.468871] env[62383]: value = "vm-496620" [ 1221.468871] env[62383]: _type = "VirtualMachine" [ 1221.468871] env[62383]: }. 
{{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1221.469245] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the lease: (returnval){ [ 1221.469245] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4b6f6-168c-0780-a764-c998f66950e9" [ 1221.469245] env[62383]: _type = "HttpNfcLease" [ 1221.469245] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1221.476432] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Releasing lock "refresh_cache-a4e3b5a2-98c2-4376-bafd-49ccee64b262" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1221.478357] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1221.478357] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4b6f6-168c-0780-a764-c998f66950e9" [ 1221.478357] env[62383]: _type = "HttpNfcLease" [ 1221.478357] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1221.479770] env[62383]: DEBUG nova.policy [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e94f486c637c4b9f8c3cfa649688a603', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e320302a6b1e466e887c787006413dec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1221.582661] env[62383]: INFO nova.compute.manager [-] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Took 1.46 seconds to deallocate network for instance. [ 1221.836869] env[62383]: DEBUG oslo_vmware.api [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452561, 'name': PowerOffVM_Task, 'duration_secs': 0.211346} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.837164] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1221.837329] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1221.837593] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76540f9c-22d4-40f1-8d31-1e249e7870ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.897618] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1221.897845] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1221.898045] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleting the datastore file [datastore2] a4e3b5a2-98c2-4376-bafd-49ccee64b262 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1221.898317] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2aa62e95-fb40-44e6-b410-3c20c8477261 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.904655] env[62383]: DEBUG oslo_vmware.api [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1221.904655] env[62383]: value = "task-2452565" [ 1221.904655] env[62383]: _type = "Task" [ 1221.904655] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.912190] env[62383]: DEBUG oslo_vmware.api [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452565, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.941903] env[62383]: DEBUG nova.compute.manager [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1221.976975] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1221.976975] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4b6f6-168c-0780-a764-c998f66950e9" [ 1221.976975] env[62383]: _type = "HttpNfcLease" [ 1221.976975] env[62383]: } is ready. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1221.977333] env[62383]: DEBUG oslo_vmware.rw_handles [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1221.977333] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4b6f6-168c-0780-a764-c998f66950e9" [ 1221.977333] env[62383]: _type = "HttpNfcLease" [ 1221.977333] env[62383]: }. {{(pid=62383) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1221.978072] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d6e7f0-1641-44fa-a733-0adb34a900aa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.982588] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb6784a4-9e32-41c4-b093-d6b09f7c1a2e tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "interface-a4e3b5a2-98c2-4376-bafd-49ccee64b262-66cdcb95-1241-47bf-829d-bbcea0032500" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.667s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1221.988102] env[62383]: DEBUG oslo_vmware.rw_handles [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5286766d-ca15-90b8-a23d-ba0dcd0d2557/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1221.988102] env[62383]: DEBUG oslo_vmware.rw_handles [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5286766d-ca15-90b8-a23d-ba0dcd0d2557/disk-0.vmdk for reading. 
{{(pid=62383) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1222.048860] env[62383]: DEBUG nova.network.neutron [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Successfully created port: a782529d-0901-4b64-93d5-d80e66052a01 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1222.084348] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-db7b22b4-290f-4d34-bc8d-da95a1166be1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.124845] env[62383]: INFO nova.compute.manager [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Took 0.54 seconds to detach 1 volumes for instance. [ 1222.126943] env[62383]: DEBUG nova.compute.manager [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Deleting volume: 24d05613-a175-4448-bd5a-122c9a2e08ac {{(pid=62383) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1222.418051] env[62383]: DEBUG oslo_vmware.api [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452565, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165776} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.418460] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1222.418741] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1222.418930] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1222.419133] env[62383]: INFO nova.compute.manager [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1222.419459] env[62383]: DEBUG oslo.service.loopingcall [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1222.419766] env[62383]: DEBUG nova.compute.manager [-] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1222.419863] env[62383]: DEBUG nova.network.neutron [-] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1222.668582] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.668948] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.669283] env[62383]: DEBUG nova.objects.instance [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lazy-loading 'resources' on Instance uuid 12843fba-0240-44fb-9687-d34a6333011b {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1222.954560] env[62383]: DEBUG nova.compute.manager [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1222.977689] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1222.977936] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1222.978113] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1222.978304] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1222.978453] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1222.978599] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1222.978809] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1222.978968] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1222.979150] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 
tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1222.979313] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1222.979486] env[62383]: DEBUG nova.virt.hardware [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1222.980415] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c9980f-ebe9-41be-b630-d339493ea6e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.988633] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39abfd31-bdbd-4298-9835-8c8adcb5ad34 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.110655] env[62383]: DEBUG nova.compute.manager [req-0264a9e1-f269-4984-bd57-a774091111c4 req-c5d3d264-da02-4870-8482-372a12865ca7 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Received event network-vif-deleted-7f9cebec-50e0-428c-a19c-f39af2719a65 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1223.110975] env[62383]: INFO nova.compute.manager [req-0264a9e1-f269-4984-bd57-a774091111c4 req-c5d3d264-da02-4870-8482-372a12865ca7 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Neutron deleted interface 7f9cebec-50e0-428c-a19c-f39af2719a65; detaching it from the instance and deleting it from the info cache [ 1223.111298] env[62383]: DEBUG nova.network.neutron [req-0264a9e1-f269-4984-bd57-a774091111c4 req-c5d3d264-da02-4870-8482-372a12865ca7 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.293650] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a5955b-c1c2-45bc-ac34-58c9a589bf86 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.301797] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e444a0e-1127-4a9b-acf6-687eb5823a53 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.333974] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c6f4c0-444e-4a95-8446-f7ab1b9d2940 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.341906] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c04520-bb79-4605-9eca-e2f8da33027f {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.356409] env[62383]: DEBUG nova.compute.provider_tree [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1223.508639] env[62383]: DEBUG nova.network.neutron [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Successfully updated port: a782529d-0901-4b64-93d5-d80e66052a01 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1223.595607] env[62383]: DEBUG nova.network.neutron [-] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.614689] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1017f349-aff6-4145-ba58-56f719355d13 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.625070] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c82887-f50f-40a0-b199-ed7f37665a19 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.659228] env[62383]: DEBUG nova.compute.manager [req-0264a9e1-f269-4984-bd57-a774091111c4 req-c5d3d264-da02-4870-8482-372a12865ca7 service nova] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Detach interface failed, port_id=7f9cebec-50e0-428c-a19c-f39af2719a65, reason: Instance a4e3b5a2-98c2-4376-bafd-49ccee64b262 could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1223.859935] env[62383]: DEBUG nova.scheduler.client.report [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1224.011124] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1224.011304] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.011476] env[62383]: DEBUG nova.network.neutron [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1224.098103] env[62383]: INFO nova.compute.manager [-] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Took 1.68 seconds to deallocate network for instance. [ 1224.365521] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.696s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1224.393849] env[62383]: INFO nova.scheduler.client.report [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted allocations for instance 12843fba-0240-44fb-9687-d34a6333011b [ 1224.551925] env[62383]: DEBUG nova.network.neutron [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1224.605123] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1224.605525] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1224.606292] env[62383]: DEBUG nova.objects.instance [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'resources' on Instance uuid a4e3b5a2-98c2-4376-bafd-49ccee64b262 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1224.712403] env[62383]: DEBUG nova.network.neutron [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Updating instance_info_cache with network_info: [{"id": "a782529d-0901-4b64-93d5-d80e66052a01", "address": "fa:16:3e:7f:b0:78", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa782529d-09", "ovs_interfaceid": "a782529d-0901-4b64-93d5-d80e66052a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.901014] env[62383]: DEBUG oslo_concurrency.lockutils [None req-24aae31b-7925-4e3e-acae-1c910b6a4c65 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "12843fba-0240-44fb-9687-d34a6333011b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.525s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.143824] env[62383]: DEBUG nova.compute.manager [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Received event 
network-vif-plugged-a782529d-0901-4b64-93d5-d80e66052a01 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1225.144399] env[62383]: DEBUG oslo_concurrency.lockutils [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] Acquiring lock "02e2865c-ff68-44ac-abc6-839e399bbe7c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.144399] env[62383]: DEBUG oslo_concurrency.lockutils [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] Lock "02e2865c-ff68-44ac-abc6-839e399bbe7c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.144399] env[62383]: DEBUG oslo_concurrency.lockutils [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] Lock "02e2865c-ff68-44ac-abc6-839e399bbe7c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.144566] env[62383]: DEBUG nova.compute.manager [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] No waiting events found dispatching network-vif-plugged-a782529d-0901-4b64-93d5-d80e66052a01 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1225.144831] env[62383]: WARNING nova.compute.manager [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Received unexpected event network-vif-plugged-a782529d-0901-4b64-93d5-d80e66052a01 for instance with vm_state building and task_state spawning. [ 1225.144976] env[62383]: DEBUG nova.compute.manager [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Received event network-changed-a782529d-0901-4b64-93d5-d80e66052a01 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1225.145130] env[62383]: DEBUG nova.compute.manager [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Refreshing instance network info cache due to event network-changed-a782529d-0901-4b64-93d5-d80e66052a01. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1225.145466] env[62383]: DEBUG oslo_concurrency.lockutils [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] Acquiring lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1225.197583] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.197830] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.198150] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.198403] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.198589] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.204473] env[62383]: INFO nova.compute.manager [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Terminating instance [ 1225.214580] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1225.214768] env[62383]: DEBUG nova.compute.manager [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 
tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Instance network_info: |[{"id": "a782529d-0901-4b64-93d5-d80e66052a01", "address": "fa:16:3e:7f:b0:78", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa782529d-09", "ovs_interfaceid": "a782529d-0901-4b64-93d5-d80e66052a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1225.216914] env[62383]: DEBUG oslo_concurrency.lockutils [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] Acquired lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.216914] env[62383]: DEBUG nova.network.neutron [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Refreshing network info cache for port a782529d-0901-4b64-93d5-d80e66052a01 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1225.217076] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:b0:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '63e45f61-1d9b-4660-8d25-89fb68d45cd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a782529d-0901-4b64-93d5-d80e66052a01', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1225.226923] env[62383]: DEBUG oslo.service.loopingcall [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
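The "Instance VIF info" line above is derived from the Neutron network_info entry logged just before it. A hedged sketch of that mapping, keeping only the fields visible in the log (Nova's vmwareapi driver does more, e.g. handling non-NSX networks; this helper is illustrative):

```python
def vif_info_from_network_info(vif, vif_model='vmxnet3'):
    """Build the vmwareapi-style VIF info dict seen in the log."""
    details = vif['details']
    return {
        'network_name': vif['network']['bridge'],            # 'br-int'
        'mac_address': vif['address'],                        # 'fa:16:3e:7f:b0:78'
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': vif_model,
    }
```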
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1225.228867] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1225.229746] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f531cb-652e-44fc-8dd7-edf3b2a1fbe8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.232890] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b65e29c8-a0e4-4312-ac9c-d3c9ba6875d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.254276] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ae1aee-e562-4b42-9bf2-471dc0837142 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.257562] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1225.257562] env[62383]: value = "task-2452567" [ 1225.257562] env[62383]: _type = "Task" [ 1225.257562] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.286914] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc69349-7c8e-4858-b57c-4fc9ee2ac394 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.292706] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452567, 'name': CreateVM_Task} progress is 15%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.297644] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda5bd28-37ac-40af-891b-af6ce90e4100 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.315528] env[62383]: DEBUG nova.compute.provider_tree [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.708661] env[62383]: DEBUG nova.compute.manager [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1225.708978] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1225.709873] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d679b0-4f3a-4ebc-ac68-def4ab648016 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.718497] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1225.718784] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa37c570-19b1-4ce4-8d3e-7c7584dde0fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.724833] env[62383]: DEBUG oslo_vmware.api [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1225.724833] env[62383]: value = "task-2452568" [ 1225.724833] env[62383]: _type = "Task" [ 1225.724833] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.735490] env[62383]: DEBUG oslo_vmware.api [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452568, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.766917] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452567, 'name': CreateVM_Task, 'duration_secs': 0.3543} completed successfully. 
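The repeated "Waiting for the task: ... to complete" / "Task: {...} progress is N%" / "completed successfully" triples around CreateVM_Task and PowerOffVM_Task come from oslo.vmware's wait_for_task/_poll_task machinery. A simplified stand-in for that loop (the real implementation uses an oslo.service looping call with error translation; the session object and its get_task_info accessor here are assumed, not the library API):

```python
import time

def wait_for_task(session, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it finishes; simplified illustration."""
    while True:
        info = session.get_task_info(task_ref)   # hypothetical accessor
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError(info.error)
        # While queued/running, the driver logs "... progress is N%."
        time.sleep(poll_interval)
```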
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.767193] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1225.767948] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1225.768060] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.768383] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1225.768638] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-928d2d0b-8936-4354-b428-f42b15d099a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.773158] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1225.773158] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52484aea-494a-29c5-5127-43cc8bf47733" [ 1225.773158] env[62383]: _type = "Task" [ 1225.773158] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.783494] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52484aea-494a-29c5-5127-43cc8bf47733, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.819442] env[62383]: DEBUG nova.scheduler.client.report [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1226.060082] env[62383]: DEBUG nova.network.neutron [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Updated VIF entry in instance network info cache for port a782529d-0901-4b64-93d5-d80e66052a01. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1226.060463] env[62383]: DEBUG nova.network.neutron [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Updating instance_info_cache with network_info: [{"id": "a782529d-0901-4b64-93d5-d80e66052a01", "address": "fa:16:3e:7f:b0:78", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa782529d-09", "ovs_interfaceid": "a782529d-0901-4b64-93d5-d80e66052a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.237033] env[62383]: DEBUG oslo_vmware.api [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452568, 'name': PowerOffVM_Task, 'duration_secs': 0.29199} completed successfully. 
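For the inventory dict reported to Placement above, usable capacity per resource class is derived as (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations rather than the total. A small worked example using the values from the log:

```python
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f'{rc}: {capacity:g}')   # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```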
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.237033] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1226.237033] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1226.237241] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8ec75dd-c875-4817-bf2e-c20a316730f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.284032] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52484aea-494a-29c5-5127-43cc8bf47733, 'name': SearchDatastore_Task, 'duration_secs': 0.011794} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.284466] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.284726] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1226.285018] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1226.285248] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.285485] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1226.285777] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b0644fd-999c-4084-b400-e056b2230d5c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.295366] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1226.295518] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1226.296369] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c694e708-4cc4-40ac-92a5-b4eac87b900f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.301969] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1226.301969] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52bde7b6-4e04-07d9-a15e-d29bcb803131" [ 1226.301969] env[62383]: _type = "Task" [ 1226.301969] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.307228] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1226.307459] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1226.307641] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleting the datastore file [datastore2] c94e9a83-04de-4144-ab6e-d96dc7c39e6d {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1226.309875] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4dbf0dc0-245d-4eba-8170-ca7ccbb44491 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.316024] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52bde7b6-4e04-07d9-a15e-d29bcb803131, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.317152] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8325c8ab-6cd5-4848-847a-8c58785c17cb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.320975] env[62383]: DEBUG oslo_vmware.api [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1226.320975] env[62383]: value = "task-2452570" [ 1226.320975] env[62383]: _type = "Task" [ 1226.320975] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.326860] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.329364] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1226.329364] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b602bc-91db-c06b-c319-4d616ef90c05" [ 1226.329364] env[62383]: _type = "Task" [ 1226.329364] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.337125] env[62383]: DEBUG oslo_vmware.api [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452570, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.345342] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b602bc-91db-c06b-c319-4d616ef90c05, 'name': SearchDatastore_Task, 'duration_secs': 0.010246} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.345342] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.345342] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 02e2865c-ff68-44ac-abc6-839e399bbe7c/02e2865c-ff68-44ac-abc6-839e399bbe7c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1226.345545] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b66a9297-d561-43d8-ba66-d5ba01dc9f04 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.352983] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1226.352983] env[62383]: value = "task-2452571" [ 1226.352983] env[62383]: _type = "Task" [ 1226.352983] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.361072] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452571, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.362071] env[62383]: INFO nova.scheduler.client.report [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleted allocations for instance a4e3b5a2-98c2-4376-bafd-49ccee64b262 [ 1226.563753] env[62383]: DEBUG oslo_concurrency.lockutils [req-9e915da0-eb19-42ba-9960-7f8020ec9179 req-cd68f628-d59f-4997-90a9-d78d61caf58b service nova] Releasing lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1226.832928] env[62383]: DEBUG oslo_vmware.api [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184786} completed successfully. 
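The sequence above (acquire the devstack-image-cache_base lock, SearchDatastore_Task for the cached VMDK, MakeDirectory if needed, then CopyVirtualDisk_Task into the instance directory) is the image-cache fast path: the base image is already on datastore2, so only a datastore-local copy is required. A hedged sketch of that flow; the lock name format follows the log, but the helper and its copy callable are illustrative, not Nova's API:

```python
from oslo_concurrency import lockutils

def fetch_via_image_cache(datastore, image_id, instance_uuid, copy_virtual_disk):
    cache_path = (f'[{datastore}] devstack-image-cache_base/'
                  f'{image_id}/{image_id}.vmdk')
    dest_path = f'[{datastore}] {instance_uuid}/{instance_uuid}.vmdk'
    # Serialize on the cached image so concurrent spawns do not race.
    with lockutils.lock(cache_path):
        # In the log, SearchDatastore_Task has already confirmed the cached
        # VMDK exists, so only the CopyVirtualDisk_Task step remains.
        copy_virtual_disk(cache_path, dest_path)
    return dest_path
```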
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.833828] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1226.833828] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1226.834010] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1226.834112] env[62383]: INFO nova.compute.manager [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1226.834396] env[62383]: DEBUG oslo.service.loopingcall [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1226.834609] env[62383]: DEBUG nova.compute.manager [-] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1226.834717] env[62383]: DEBUG nova.network.neutron [-] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1226.874410] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452571, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496929} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.874954] env[62383]: DEBUG oslo_concurrency.lockutils [None req-2de55329-f234-4c0f-940d-134dbb37a615 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "a4e3b5a2-98c2-4376-bafd-49ccee64b262" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.079s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.875965] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 02e2865c-ff68-44ac-abc6-839e399bbe7c/02e2865c-ff68-44ac-abc6-839e399bbe7c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1226.876216] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1226.876646] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e1bffbf-3fdb-4179-b4dd-2c199ab6cc4a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.883802] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1226.883802] env[62383]: value = "task-2452572" [ 1226.883802] env[62383]: _type = "Task" [ 1226.883802] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.900910] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452572, 'name': ExtendVirtualDisk_Task} progress is 0%. 
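"Extending root virtual disk to 1048576" above is the post-copy resize of the root VMDK to the flavor's root disk size; the VMware ExtendVirtualDisk call takes the new capacity in KB, so 1048576 presumably corresponds to a 1 GiB flavor root disk. A one-line illustration of that conversion (assumed unit, per the newCapacityKb semantics of the vSphere API):

```python
def root_disk_size_kb(root_gb):
    # GiB -> KB, the unit ExtendVirtualDisk_Task expects.
    return root_gb * 1024 * 1024

assert root_disk_size_kb(1) == 1048576
```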
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.172837] env[62383]: DEBUG nova.compute.manager [req-8898688b-5adf-427f-accb-c4a5be80d046 req-4ae88500-4cbc-49c5-94a3-c340aa83d2a2 service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Received event network-vif-deleted-ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1227.174901] env[62383]: INFO nova.compute.manager [req-8898688b-5adf-427f-accb-c4a5be80d046 req-4ae88500-4cbc-49c5-94a3-c340aa83d2a2 service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Neutron deleted interface ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2; detaching it from the instance and deleting it from the info cache [ 1227.174901] env[62383]: DEBUG nova.network.neutron [req-8898688b-5adf-427f-accb-c4a5be80d046 req-4ae88500-4cbc-49c5-94a3-c340aa83d2a2 service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.393289] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07031} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.393469] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1227.394130] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4165aa7b-f4b2-4211-8dda-4bd6afc227ed {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.416773] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 02e2865c-ff68-44ac-abc6-839e399bbe7c/02e2865c-ff68-44ac-abc6-839e399bbe7c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1227.417093] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3416991-2add-4d26-8a87-2ee249171bc3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.436699] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1227.436699] env[62383]: value = "task-2452573" [ 1227.436699] env[62383]: _type = "Task" [ 1227.436699] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.444896] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452573, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.600719] env[62383]: DEBUG oslo_concurrency.lockutils [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.601023] env[62383]: DEBUG oslo_concurrency.lockutils [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.601417] env[62383]: DEBUG oslo_concurrency.lockutils [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.601665] env[62383]: DEBUG oslo_concurrency.lockutils [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.601847] env[62383]: DEBUG oslo_concurrency.lockutils [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.603951] env[62383]: DEBUG nova.network.neutron [-] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.606937] env[62383]: INFO nova.compute.manager [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Terminating instance [ 1227.676637] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d1d5171-1415-443b-96a6-3de3cdb24947 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.687240] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a355f66c-ee4e-4b3c-bed3-ed8101aacb85 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.718144] env[62383]: DEBUG nova.compute.manager [req-8898688b-5adf-427f-accb-c4a5be80d046 req-4ae88500-4cbc-49c5-94a3-c340aa83d2a2 service nova] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Detach interface failed, port_id=ff37bd19-ebc6-4eca-a20a-89b1dcf23bb2, reason: Instance c94e9a83-04de-4144-ab6e-d96dc7c39e6d could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1227.946879] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452573, 'name': ReconfigVM_Task, 'duration_secs': 0.341503} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.947192] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 02e2865c-ff68-44ac-abc6-839e399bbe7c/02e2865c-ff68-44ac-abc6-839e399bbe7c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1227.947824] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23262540-70a5-4735-bfc1-d3a932c9b9ba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.954797] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1227.954797] env[62383]: value = "task-2452574" [ 1227.954797] env[62383]: _type = "Task" [ 1227.954797] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.962982] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452574, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.107960] env[62383]: INFO nova.compute.manager [-] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Took 1.27 seconds to deallocate network for instance. [ 1228.113961] env[62383]: DEBUG nova.compute.manager [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1228.114207] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1228.115155] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ed84e0-6f45-4537-b7f5-908622e15e4d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.122660] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1228.122909] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fef56aa3-3ec0-45e6-a5da-825f72b8b4e6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.129779] env[62383]: DEBUG oslo_vmware.api [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1228.129779] env[62383]: value = "task-2452575" [ 1228.129779] env[62383]: _type = "Task" [ 1228.129779] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.139462] env[62383]: DEBUG oslo_vmware.api [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452575, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.464757] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452574, 'name': Rename_Task, 'duration_secs': 0.165587} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.465157] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1228.465378] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f5766fa-a684-445b-8196-2fae623df80c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.472484] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1228.472484] env[62383]: value = "task-2452576" [ 1228.472484] env[62383]: _type = "Task" [ 1228.472484] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.480695] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452576, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.615188] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.615550] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1228.615852] env[62383]: DEBUG nova.objects.instance [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lazy-loading 'resources' on Instance uuid c94e9a83-04de-4144-ab6e-d96dc7c39e6d {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1228.639367] env[62383]: DEBUG oslo_vmware.api [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452575, 'name': PowerOffVM_Task, 'duration_secs': 0.225554} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.639629] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1228.639800] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1228.640056] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5041909-3ea0-4b75-9905-2e71059e17c6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.724697] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1228.725019] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1228.725319] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleting the datastore file [datastore2] ac4e173d-fec9-4a0f-b9b6-ad83a98989e7 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1228.725750] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d626390-6958-4808-b95f-d91f97162406 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.733689] env[62383]: DEBUG oslo_vmware.api [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for the task: (returnval){ [ 1228.733689] env[62383]: value = "task-2452578" [ 1228.733689] env[62383]: _type = "Task" [ 1228.733689] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.741932] env[62383]: DEBUG oslo_vmware.api [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452578, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.982613] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452576, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.221312] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d652bc-254c-436f-bbff-e6d9890ae51a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.229770] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4183103c-2ae6-47be-9757-e30be217a7b0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.264306] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee886187-2206-4b05-97c9-834d2bd210d8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.270072] env[62383]: DEBUG oslo_vmware.api [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Task: {'id': task-2452578, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211142} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.270726] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1229.270917] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1229.271116] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1229.271298] env[62383]: INFO nova.compute.manager [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1229.271543] env[62383]: DEBUG oslo.service.loopingcall [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1229.271745] env[62383]: DEBUG nova.compute.manager [-] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1229.271831] env[62383]: DEBUG nova.network.neutron [-] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1229.276779] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc054ae9-e0e5-4495-bc86-15cb4c27743d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.290476] env[62383]: DEBUG nova.compute.provider_tree [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1229.483339] env[62383]: DEBUG oslo_vmware.api [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452576, 'name': PowerOnVM_Task, 'duration_secs': 0.529543} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.483685] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1229.483960] env[62383]: INFO nova.compute.manager [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Took 6.53 seconds to spawn the instance on the hypervisor. 
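Both terminations in this stretch (c94e9a83-... and ac4e173d-...) follow the same teardown path. An illustrative condensation of that sequence, with hypothetical helper names standing in for the vmops/neutron calls visible in the log:

```python
def destroy_on_vcenter(vm_ops, network_api, instance):
    vm_ops.power_off(instance)                 # VirtualMachine.PowerOffVM_Task
    vm_ops.unregister(instance)                # VirtualMachine.UnregisterVM
    vm_ops.delete_datastore_files(instance)    # FileManager.DeleteDatastoreFile_Task
    # Deallocation of Neutron ports is retried via an oslo.service loopingcall,
    # which is why the log shows "Waiting for function ..._deallocate_network..."
    network_api.deallocate_for_instance(instance)
```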
[ 1229.484061] env[62383]: DEBUG nova.compute.manager [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1229.484831] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470d8381-2cb1-4de4-afde-30ad5c858d87 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.564716] env[62383]: DEBUG nova.compute.manager [req-4cadadd5-e22f-4219-abca-a96af6ee90fe req-5bfc53ca-3d45-4a0a-86b4-b64a8e721ee9 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Received event network-vif-deleted-633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1229.564988] env[62383]: INFO nova.compute.manager [req-4cadadd5-e22f-4219-abca-a96af6ee90fe req-5bfc53ca-3d45-4a0a-86b4-b64a8e721ee9 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Neutron deleted interface 633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee; detaching it from the instance and deleting it from the info cache [ 1229.565164] env[62383]: DEBUG nova.network.neutron [req-4cadadd5-e22f-4219-abca-a96af6ee90fe req-5bfc53ca-3d45-4a0a-86b4-b64a8e721ee9 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.813715] env[62383]: ERROR nova.scheduler.client.report [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [req-843a53d2-fce0-4cb6-b16e-3c0b27c74bbe] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-843a53d2-fce0-4cb6-b16e-3c0b27c74bbe"}]} [ 1229.830641] env[62383]: DEBUG nova.scheduler.client.report [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1229.845128] env[62383]: DEBUG nova.scheduler.client.report [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1229.845429] env[62383]: DEBUG nova.compute.provider_tree [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1229.858137] env[62383]: DEBUG nova.scheduler.client.report [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1229.878884] env[62383]: DEBUG nova.scheduler.client.report [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1229.968439] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf01a05-dde0-4139-b0ac-f4386e517d88 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.976316] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ab2068f8-85c8-4adf-9737-e8a765ee6f20 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.012798] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c196eed-d769-46ff-b9ca-75f006911d22 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.017958] env[62383]: INFO nova.compute.manager [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Took 11.29 seconds to build instance. [ 1230.022532] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16779f2-e9d0-4c3b-916d-fd869b257a44 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.036962] env[62383]: DEBUG nova.compute.provider_tree [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1230.045746] env[62383]: DEBUG nova.network.neutron [-] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.067974] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ab8163a-a1b3-41d8-9e56-78c0a73ff950 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.078875] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d524d9-361b-4ef1-9fc8-831a3bab6b89 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.111262] env[62383]: DEBUG nova.compute.manager [req-4cadadd5-e22f-4219-abca-a96af6ee90fe req-5bfc53ca-3d45-4a0a-86b4-b64a8e721ee9 service nova] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Detach interface failed, port_id=633f7cbf-4b39-4d0a-944c-adaf5bb4a8ee, reason: Instance ac4e173d-fec9-4a0f-b9b6-ad83a98989e7 could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1230.520136] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f98aa5cd-5372-4bfa-9be0-3b7030526c83 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "02e2865c-ff68-44ac-abc6-839e399bbe7c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.803s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.547865] env[62383]: INFO nova.compute.manager [-] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Took 1.28 seconds to deallocate network for instance. [ 1230.579493] env[62383]: DEBUG nova.scheduler.client.report [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Updated inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 with generation 179 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1230.579659] env[62383]: DEBUG nova.compute.provider_tree [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Updating resource provider 60615f54-0557-436e-a486-87505bffb4c7 generation from 179 to 180 during operation: update_inventory {{(pid=62383) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1230.579805] env[62383]: DEBUG nova.compute.provider_tree [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1231.057440] env[62383]: DEBUG oslo_concurrency.lockutils [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.084669] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.469s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.087546] 
env[62383]: DEBUG oslo_concurrency.lockutils [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.030s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.087895] env[62383]: DEBUG nova.objects.instance [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lazy-loading 'resources' on Instance uuid ac4e173d-fec9-4a0f-b9b6-ad83a98989e7 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1231.104207] env[62383]: INFO nova.scheduler.client.report [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted allocations for instance c94e9a83-04de-4144-ab6e-d96dc7c39e6d [ 1231.537324] env[62383]: DEBUG oslo_vmware.rw_handles [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5286766d-ca15-90b8-a23d-ba0dcd0d2557/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1231.538442] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9393ba04-db23-4a29-a42c-e2a8d79eefdc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.545523] env[62383]: DEBUG oslo_vmware.rw_handles [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5286766d-ca15-90b8-a23d-ba0dcd0d2557/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1231.545710] env[62383]: ERROR oslo_vmware.rw_handles [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5286766d-ca15-90b8-a23d-ba0dcd0d2557/disk-0.vmdk due to incomplete transfer. [ 1231.545959] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-12ba992b-4044-41bb-9b51-3ce9cf3ca126 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.553407] env[62383]: DEBUG oslo_vmware.rw_handles [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5286766d-ca15-90b8-a23d-ba0dcd0d2557/disk-0.vmdk. 
{{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1231.553728] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Uploaded image c29d8c40-d10a-482c-893e-d9a6953f83ec to the Glance image server {{(pid=62383) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1231.556779] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Destroying the VM {{(pid=62383) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1231.557086] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6c857123-7239-4098-add9-e99820262a8d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.562878] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1231.562878] env[62383]: value = "task-2452580" [ 1231.562878] env[62383]: _type = "Task" [ 1231.562878] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.573734] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452580, 'name': Destroy_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.595020] env[62383]: DEBUG nova.compute.manager [req-ccfd6f56-e7a0-4666-9987-15df3cdc44d8 req-240e2a9e-78bb-4d45-97fc-cfaeb05474d7 service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Received event network-changed-a782529d-0901-4b64-93d5-d80e66052a01 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1231.595222] env[62383]: DEBUG nova.compute.manager [req-ccfd6f56-e7a0-4666-9987-15df3cdc44d8 req-240e2a9e-78bb-4d45-97fc-cfaeb05474d7 service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Refreshing instance network info cache due to event network-changed-a782529d-0901-4b64-93d5-d80e66052a01. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1231.595467] env[62383]: DEBUG oslo_concurrency.lockutils [req-ccfd6f56-e7a0-4666-9987-15df3cdc44d8 req-240e2a9e-78bb-4d45-97fc-cfaeb05474d7 service nova] Acquiring lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1231.595697] env[62383]: DEBUG oslo_concurrency.lockutils [req-ccfd6f56-e7a0-4666-9987-15df3cdc44d8 req-240e2a9e-78bb-4d45-97fc-cfaeb05474d7 service nova] Acquired lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.595845] env[62383]: DEBUG nova.network.neutron [req-ccfd6f56-e7a0-4666-9987-15df3cdc44d8 req-240e2a9e-78bb-4d45-97fc-cfaeb05474d7 service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Refreshing network info cache for port a782529d-0901-4b64-93d5-d80e66052a01 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1231.613889] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "c94e9a83-04de-4144-ab6e-d96dc7c39e6d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.416s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.673335] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b60247c-39d9-44be-8546-62f4fafb35fb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.681054] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809163fc-04da-4815-8442-97382d2bbe1b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.712516] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f348c5b-8365-4832-80a2-ee74bf37b31b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.720744] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ab37cb3-b88b-422b-9eb2-e3e9117ad8ef {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.735234] env[62383]: DEBUG nova.compute.provider_tree [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1232.073047] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452580, 'name': Destroy_Task, 'duration_secs': 0.335088} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.073327] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Destroyed the VM [ 1232.073564] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Deleting Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1232.073805] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d399a690-824f-46da-bee1-86a294d8770a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.080568] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1232.080568] env[62383]: value = "task-2452581" [ 1232.080568] env[62383]: _type = "Task" [ 1232.080568] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.087721] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452581, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.240512] env[62383]: DEBUG nova.scheduler.client.report [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1232.308297] env[62383]: DEBUG nova.network.neutron [req-ccfd6f56-e7a0-4666-9987-15df3cdc44d8 req-240e2a9e-78bb-4d45-97fc-cfaeb05474d7 service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Updated VIF entry in instance network info cache for port a782529d-0901-4b64-93d5-d80e66052a01. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1232.308714] env[62383]: DEBUG nova.network.neutron [req-ccfd6f56-e7a0-4666-9987-15df3cdc44d8 req-240e2a9e-78bb-4d45-97fc-cfaeb05474d7 service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Updating instance_info_cache with network_info: [{"id": "a782529d-0901-4b64-93d5-d80e66052a01", "address": "fa:16:3e:7f:b0:78", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa782529d-09", "ovs_interfaceid": "a782529d-0901-4b64-93d5-d80e66052a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1232.590206] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452581, 'name': RemoveSnapshot_Task, 'duration_secs': 0.357325} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.590507] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Deleted Snapshot of the VM instance {{(pid=62383) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1232.590807] env[62383]: DEBUG nova.compute.manager [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1232.591558] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cfaebd-74c9-48d6-ba0d-8b4cc301d3a9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.748051] env[62383]: DEBUG oslo_concurrency.lockutils [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.767537] env[62383]: INFO nova.scheduler.client.report [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Deleted allocations for instance ac4e173d-fec9-4a0f-b9b6-ad83a98989e7 [ 1232.813404] env[62383]: DEBUG oslo_concurrency.lockutils [req-ccfd6f56-e7a0-4666-9987-15df3cdc44d8 req-240e2a9e-78bb-4d45-97fc-cfaeb05474d7 service nova] Releasing lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.102920] env[62383]: INFO nova.compute.manager [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Shelve offloading [ 1233.274851] env[62383]: DEBUG oslo_concurrency.lockutils [None req-66953bde-1127-4a5b-ae43-76da5a9a3719 tempest-AttachInterfacesTestJSON-112066379 tempest-AttachInterfacesTestJSON-112066379-project-member] Lock "ac4e173d-fec9-4a0f-b9b6-ad83a98989e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.674s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1233.606409] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1233.606792] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7443046b-50dd-4a23-9e3a-7b127a2085b1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.614604] env[62383]: DEBUG 
oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1233.614604] env[62383]: value = "task-2452582" [ 1233.614604] env[62383]: _type = "Task" [ 1233.614604] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.623771] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1233.623979] env[62383]: DEBUG nova.compute.manager [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1233.624767] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40b044f-258a-4d96-bb62-4cb62fd9d265 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.630301] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.630468] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.630640] env[62383]: DEBUG nova.network.neutron [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1234.333491] env[62383]: DEBUG nova.network.neutron [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating instance_info_cache with network_info: [{"id": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "address": "fa:16:3e:b4:77:63", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d1962c-c0", "ovs_interfaceid": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.836477] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1234.999271] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "1b025655-acad-4b70-9e1a-489683cafb7e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.999546] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "1b025655-acad-4b70-9e1a-489683cafb7e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.999769] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "1b025655-acad-4b70-9e1a-489683cafb7e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.999952] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "1b025655-acad-4b70-9e1a-489683cafb7e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.000146] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "1b025655-acad-4b70-9e1a-489683cafb7e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.003029] env[62383]: INFO nova.compute.manager [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 
tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Terminating instance [ 1235.069279] env[62383]: DEBUG nova.compute.manager [req-e86a6cfd-aeb8-4b98-a341-c9f5af3bd0c5 req-17489310-ec7f-4b33-84f0-5c968646c3b7 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received event network-vif-unplugged-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1235.069279] env[62383]: DEBUG oslo_concurrency.lockutils [req-e86a6cfd-aeb8-4b98-a341-c9f5af3bd0c5 req-17489310-ec7f-4b33-84f0-5c968646c3b7 service nova] Acquiring lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.069279] env[62383]: DEBUG oslo_concurrency.lockutils [req-e86a6cfd-aeb8-4b98-a341-c9f5af3bd0c5 req-17489310-ec7f-4b33-84f0-5c968646c3b7 service nova] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.069279] env[62383]: DEBUG oslo_concurrency.lockutils [req-e86a6cfd-aeb8-4b98-a341-c9f5af3bd0c5 req-17489310-ec7f-4b33-84f0-5c968646c3b7 service nova] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.069279] env[62383]: DEBUG nova.compute.manager [req-e86a6cfd-aeb8-4b98-a341-c9f5af3bd0c5 req-17489310-ec7f-4b33-84f0-5c968646c3b7 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] No waiting events found dispatching network-vif-unplugged-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1235.069279] env[62383]: WARNING nova.compute.manager [req-e86a6cfd-aeb8-4b98-a341-c9f5af3bd0c5 req-17489310-ec7f-4b33-84f0-5c968646c3b7 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received unexpected event network-vif-unplugged-f1d1962c-c0f2-4e5b-9885-b71019f7e792 for instance with vm_state shelved and task_state shelving_offloading. 
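Earlier in this section the same request (req-fb702e77-b29c-4cd5-b2d5-8e016e429e5c) hit a 409 "placement.concurrent_update" while writing that inventory: the PUT carried a stale resource provider generation, the report client refreshed the provider's inventories, aggregates and traits, and the retried write then succeeded and bumped the generation from 179 to 180. A minimal sketch of that retry-on-generation-conflict pattern, with hypothetical get_provider_state()/put_inventories() helpers standing in for the real report client:

    import time

    class ConflictError(Exception):
        """Stand-in for a 409 placement.concurrent_update response."""

    def set_inventory_with_retry(provider_uuid, desired,
                                 get_provider_state, put_inventories,
                                 max_attempts=4, delay=0.1):
        """Write inventories, re-reading the provider generation on conflicts."""
        for attempt in range(1, max_attempts + 1):
            # Re-read the provider so the PUT carries the current generation.
            generation, _current = get_provider_state(provider_uuid)
            try:
                put_inventories(provider_uuid, generation, desired)
                # Placement bumps the generation on a successful write,
                # as in the 179 -> 180 transition logged above.
                return generation + 1
            except ConflictError:
                if attempt == max_attempts:
                    raise
                time.sleep(delay)  # another writer won the race; back off, retry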
[ 1235.160144] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1235.160326] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4767634-abba-4c71-9add-3b19a0ff992d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.168892] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1235.169821] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4aff758-c67f-4cfb-bf77-a25f2856c8d7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.507292] env[62383]: DEBUG nova.compute.manager [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1235.507540] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1235.508471] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a704591-9b7a-4ba6-8216-afb5d26b0c1d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.516961] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1235.517217] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8763dfe-47af-4cfa-bcc8-a722e584791b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.523242] env[62383]: DEBUG oslo_vmware.api [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1235.523242] env[62383]: value = "task-2452584" [ 1235.523242] env[62383]: _type = "Task" [ 1235.523242] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.531771] env[62383]: DEBUG oslo_vmware.api [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452584, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.653471] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1235.653807] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1235.654114] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleting the datastore file [datastore2] b8e512cd-5eb9-423c-9447-833e34909bc3 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1235.654456] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe9bca3e-6fce-4198-8242-8e828d42ed47 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.661797] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1235.661797] env[62383]: value = "task-2452585" [ 1235.661797] env[62383]: _type = "Task" [ 1235.661797] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.670156] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452585, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.032760] env[62383]: DEBUG oslo_vmware.api [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452584, 'name': PowerOffVM_Task, 'duration_secs': 0.237133} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.034021] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1236.034021] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1236.034021] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c26dfab-9520-4ddf-a800-76e7e870be16 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.096959] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1236.097207] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1236.097390] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleting the datastore file [datastore2] 1b025655-acad-4b70-9e1a-489683cafb7e {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1236.098136] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62c13e17-e2a0-4c62-a3f7-74400070fffb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.105958] env[62383]: DEBUG oslo_vmware.api [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for the task: (returnval){ [ 1236.105958] env[62383]: value = "task-2452587" [ 1236.105958] env[62383]: _type = "Task" [ 1236.105958] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.113870] env[62383]: DEBUG oslo_vmware.api [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452587, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.172606] env[62383]: DEBUG oslo_vmware.api [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151914} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.172903] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1236.173498] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1236.173498] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1236.202230] env[62383]: INFO nova.scheduler.client.report [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleted allocations for instance b8e512cd-5eb9-423c-9447-833e34909bc3 [ 1236.615560] env[62383]: DEBUG oslo_vmware.api [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Task: {'id': task-2452587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160052} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.615842] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1236.616052] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1236.616245] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1236.616426] env[62383]: INFO nova.compute.manager [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1236.616667] env[62383]: DEBUG oslo.service.loopingcall [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1236.616881] env[62383]: DEBUG nova.compute.manager [-] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1236.616984] env[62383]: DEBUG nova.network.neutron [-] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1236.707241] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1236.707475] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1236.707715] env[62383]: DEBUG nova.objects.instance [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'resources' on Instance uuid b8e512cd-5eb9-423c-9447-833e34909bc3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1237.105766] env[62383]: DEBUG nova.compute.manager [req-78ce738f-1a2c-43bc-83be-dff06ba1b845 req-ae7e34a9-00a9-4de1-ba08-4c61a5941c57 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received event network-changed-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1237.106055] env[62383]: DEBUG nova.compute.manager [req-78ce738f-1a2c-43bc-83be-dff06ba1b845 req-ae7e34a9-00a9-4de1-ba08-4c61a5941c57 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Refreshing instance network info cache due to event network-changed-f1d1962c-c0f2-4e5b-9885-b71019f7e792. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1237.106232] env[62383]: DEBUG oslo_concurrency.lockutils [req-78ce738f-1a2c-43bc-83be-dff06ba1b845 req-ae7e34a9-00a9-4de1-ba08-4c61a5941c57 service nova] Acquiring lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.106450] env[62383]: DEBUG oslo_concurrency.lockutils [req-78ce738f-1a2c-43bc-83be-dff06ba1b845 req-ae7e34a9-00a9-4de1-ba08-4c61a5941c57 service nova] Acquired lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.106542] env[62383]: DEBUG nova.network.neutron [req-78ce738f-1a2c-43bc-83be-dff06ba1b845 req-ae7e34a9-00a9-4de1-ba08-4c61a5941c57 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Refreshing network info cache for port f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1237.210188] env[62383]: DEBUG nova.objects.instance [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'numa_topology' on Instance uuid b8e512cd-5eb9-423c-9447-833e34909bc3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1237.715124] env[62383]: DEBUG nova.objects.base [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1237.776885] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ffe78d-0e0f-43c1-80ba-834415db570a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.787799] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98290091-7840-409f-be81-d16ea44568e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.826598] env[62383]: DEBUG nova.network.neutron [-] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.832036] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5391fc85-5c98-440c-8dee-92d2ef2da17a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.842769] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8189094c-9117-4cc7-9438-23a9b7b885f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.859278] env[62383]: DEBUG nova.compute.provider_tree [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.990096] 
env[62383]: DEBUG nova.network.neutron [req-78ce738f-1a2c-43bc-83be-dff06ba1b845 req-ae7e34a9-00a9-4de1-ba08-4c61a5941c57 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updated VIF entry in instance network info cache for port f1d1962c-c0f2-4e5b-9885-b71019f7e792. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1237.990420] env[62383]: DEBUG nova.network.neutron [req-78ce738f-1a2c-43bc-83be-dff06ba1b845 req-ae7e34a9-00a9-4de1-ba08-4c61a5941c57 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating instance_info_cache with network_info: [{"id": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "address": "fa:16:3e:b4:77:63", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf1d1962c-c0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.338567] env[62383]: INFO nova.compute.manager [-] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Took 1.72 seconds to deallocate network for instance. 
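The "Waiting for function ... _deallocate_network_with_retries to return" record earlier in this section, and the repeated _poll_task "progress is 0%" / "completed successfully" records, are both driven by oslo.service looping calls: a worker function runs on a timer until it raises LoopingCallDone, whose return value is handed back to the caller blocked on wait(). A minimal fixed-interval sketch, assuming oslo.service (and its eventlet dependency) is importable; the real callers may use back-off or timeout variants, and _FakeTask is purely illustrative:

    from oslo_service import loopingcall

    def wait_until_done(check_fn, interval=0.5):
        """Call check_fn every `interval` seconds until it returns a value."""
        def _poll():
            result = check_fn()
            if result is not None:
                # Stops the loop; the value is returned by .wait() below.
                raise loopingcall.LoopingCallDone(retvalue=result)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()

    class _FakeTask:
        """Illustrative task that reports 'success' on the third poll."""
        def __init__(self):
            self._polls = 0

        def state(self):
            self._polls += 1
            return 'success' if self._polls >= 3 else 'running'

    if __name__ == '__main__':
        task = _FakeTask()
        done = wait_until_done(
            lambda: 'success' if task.state() == 'success' else None)
        print(done)  # 'success' after roughly three poll intervals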
[ 1238.363258] env[62383]: DEBUG nova.scheduler.client.report [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.493680] env[62383]: DEBUG oslo_concurrency.lockutils [req-78ce738f-1a2c-43bc-83be-dff06ba1b845 req-ae7e34a9-00a9-4de1-ba08-4c61a5941c57 service nova] Releasing lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1238.843901] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1238.867966] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.160s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1238.871191] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.027s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1238.871454] env[62383]: DEBUG nova.objects.instance [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lazy-loading 'resources' on Instance uuid 1b025655-acad-4b70-9e1a-489683cafb7e {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.132846] env[62383]: DEBUG nova.compute.manager [req-696d8b61-d9dd-4cc3-a0f8-7ae04b023c80 req-3f5e3636-736f-4390-92e2-d42b77f6c4f5 service nova] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Received event network-vif-deleted-845110d3-620c-4852-8aab-e6907d5b3af2 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1239.383629] env[62383]: DEBUG oslo_concurrency.lockutils [None req-a531766c-46a6-4364-aeae-22351bdfc48a tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.099s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1239.445676] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80347ec5-9977-4c33-8d5b-0c9965927231 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.453402] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9819396-dc5e-4ab0-a352-f113ecab2c87 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.483333] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f249ea1-a230-444a-a95f-c10af0e16d9f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.489698] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "b8e512cd-5eb9-423c-9447-833e34909bc3" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.489913] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.490123] env[62383]: INFO nova.compute.manager [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Unshelving [ 1239.492879] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2764a595-ef18-44c3-85c8-d387363529c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.506589] env[62383]: DEBUG nova.compute.provider_tree [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1239.512811] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquiring lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.513050] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.009103] env[62383]: DEBUG nova.scheduler.client.report [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1240.015053] env[62383]: DEBUG nova.compute.manager [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1240.514405] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.643s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.521488] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.521729] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.521933] env[62383]: DEBUG nova.objects.instance [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'pci_requests' on Instance uuid b8e512cd-5eb9-423c-9447-833e34909bc3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1240.532500] env[62383]: INFO nova.scheduler.client.report [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Deleted allocations for instance 1b025655-acad-4b70-9e1a-489683cafb7e [ 1240.534880] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1241.026824] env[62383]: DEBUG nova.objects.instance [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'numa_topology' on Instance uuid b8e512cd-5eb9-423c-9447-833e34909bc3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1241.040914] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1aaa3616-18fb-4fdc-b177-81bab5674df0 tempest-ServerActionsTestOtherA-2071253163 tempest-ServerActionsTestOtherA-2071253163-project-member] Lock "1b025655-acad-4b70-9e1a-489683cafb7e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.041s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.529035] env[62383]: INFO nova.compute.claims [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1242.619226] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a2fe8c-1485-49af-9ec7-6d6bf258847e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.628170] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f4c611-9aa3-4aca-bb6d-88990751ad49 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.661985] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b23b937-30e2-4339-8aab-6c7d593336eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.669668] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0357e37-7ee1-4aa6-88ad-65eaccc9aef0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.684996] env[62383]: DEBUG nova.compute.provider_tree [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1243.188605] env[62383]: DEBUG nova.scheduler.client.report [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1243.693730] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 
tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.172s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.696523] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.162s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.698070] env[62383]: INFO nova.compute.claims [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1243.728389] env[62383]: INFO nova.network.neutron [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating port f1d1962c-c0f2-4e5b-9885-b71019f7e792 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1244.772703] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f8c054-80fe-4d80-ac49-670791731b96 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.781289] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd5580f-0685-48d6-9f84-a0c396a24a9b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.812980] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d2d288-c0d6-456d-8568-b3bfc14dbfc1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.820528] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb855ef7-c9de-418b-a655-88cb56e3affb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.834092] env[62383]: DEBUG nova.compute.provider_tree [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1245.126026] env[62383]: DEBUG nova.compute.manager [req-72be9883-9cd2-4bac-85de-432998368761 req-bc4a6ade-c704-47b1-8f2d-85da2dab28d7 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received event network-vif-plugged-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1245.128355] env[62383]: DEBUG oslo_concurrency.lockutils [req-72be9883-9cd2-4bac-85de-432998368761 req-bc4a6ade-c704-47b1-8f2d-85da2dab28d7 service nova] Acquiring lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
{{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.128551] env[62383]: DEBUG oslo_concurrency.lockutils [req-72be9883-9cd2-4bac-85de-432998368761 req-bc4a6ade-c704-47b1-8f2d-85da2dab28d7 service nova] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.129251] env[62383]: DEBUG oslo_concurrency.lockutils [req-72be9883-9cd2-4bac-85de-432998368761 req-bc4a6ade-c704-47b1-8f2d-85da2dab28d7 service nova] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.129251] env[62383]: DEBUG nova.compute.manager [req-72be9883-9cd2-4bac-85de-432998368761 req-bc4a6ade-c704-47b1-8f2d-85da2dab28d7 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] No waiting events found dispatching network-vif-plugged-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1245.129251] env[62383]: WARNING nova.compute.manager [req-72be9883-9cd2-4bac-85de-432998368761 req-bc4a6ade-c704-47b1-8f2d-85da2dab28d7 service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received unexpected event network-vif-plugged-f1d1962c-c0f2-4e5b-9885-b71019f7e792 for instance with vm_state shelved_offloaded and task_state spawning. [ 1245.204271] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1245.204457] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.204706] env[62383]: DEBUG nova.network.neutron [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1245.337649] env[62383]: DEBUG nova.scheduler.client.report [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1245.842493] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.146s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.843645] env[62383]: DEBUG nova.compute.manager [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1245.959876] env[62383]: DEBUG nova.network.neutron [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating instance_info_cache with network_info: [{"id": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "address": "fa:16:3e:b4:77:63", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d1962c-c0", "ovs_interfaceid": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.351027] env[62383]: DEBUG nova.compute.utils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1246.352080] env[62383]: DEBUG nova.compute.manager [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1246.352636] env[62383]: DEBUG nova.network.neutron [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1246.391274] env[62383]: DEBUG nova.policy [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a2580bedcb34bed8ea9a17bd56f7a6a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a329d49499d41a2b8f0abc05e7bb0a1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1246.462986] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.488115] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='eee92a2bdedc06d72f0c6198b0ba6439',container_format='bare',created_at=2025-02-11T15:35:40Z,direct_url=,disk_format='vmdk',id=c29d8c40-d10a-482c-893e-d9a6953f83ec,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1483593890-shelved',owner='b395bdf2df794b32a117f93fa4887c8e',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2025-02-11T15:35:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1246.488616] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1246.488845] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1246.489071] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Flavor pref 0:0:0 {{(pid=62383) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1246.489231] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1246.489382] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1246.489599] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1246.489763] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1246.489934] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1246.490113] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1246.490292] env[62383]: DEBUG nova.virt.hardware [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1246.491164] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29cfc42-f757-4364-957b-6cef617c20fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.503666] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27c41c3-9dd0-40f1-839a-c71fdc9cce05 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.521096] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:77:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7908211b-df93-467b-87a8-3c3d29b03de6', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1d1962c-c0f2-4e5b-9885-b71019f7e792', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1246.530955] env[62383]: DEBUG oslo.service.loopingcall [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1246.531212] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1246.531591] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cfd7759-29a0-4988-bbb0-2af1bd5174e3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.551348] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1246.551348] env[62383]: value = "task-2452588" [ 1246.551348] env[62383]: _type = "Task" [ 1246.551348] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.559921] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452588, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.656711] env[62383]: DEBUG nova.network.neutron [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Successfully created port: b4fa95e4-d3ed-4f7a-b546-c1db145d291a {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1246.812169] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.812748] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.855476] env[62383]: DEBUG nova.compute.manager [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1247.062175] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452588, 'name': CreateVM_Task, 'duration_secs': 0.330773} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.062342] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1247.062976] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1247.063154] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.063518] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1247.064125] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00b4d49d-12ab-498c-8333-80e20c5bc8cd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.068274] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1247.068274] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]525787f7-2484-72d4-3fad-98f3b870ac9c" [ 1247.068274] env[62383]: _type = "Task" [ 1247.068274] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.083835] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1247.084086] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Processing image c29d8c40-d10a-482c-893e-d9a6953f83ec {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1247.084313] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec/c29d8c40-d10a-482c-893e-d9a6953f83ec.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1247.084463] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec/c29d8c40-d10a-482c-893e-d9a6953f83ec.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.084678] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1247.085053] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d52556e7-4b73-461b-80fd-6612f879c53f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.101541] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1247.101781] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1247.102533] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89ed468e-0c84-4d14-ac93-d9dc89a77590 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.107324] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1247.107324] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a6af11-4652-5d0b-f155-72f7fb0d9a03" [ 1247.107324] env[62383]: _type = "Task" [ 1247.107324] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.116018] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a6af11-4652-5d0b-f155-72f7fb0d9a03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.151520] env[62383]: DEBUG nova.compute.manager [req-8cbc15bb-b20e-4dbf-9fc4-df56eb7bea62 req-3f3b5c66-b2ca-4288-91a4-5c828bdf4efd service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received event network-changed-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1247.151720] env[62383]: DEBUG nova.compute.manager [req-8cbc15bb-b20e-4dbf-9fc4-df56eb7bea62 req-3f3b5c66-b2ca-4288-91a4-5c828bdf4efd service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Refreshing instance network info cache due to event network-changed-f1d1962c-c0f2-4e5b-9885-b71019f7e792. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1247.151866] env[62383]: DEBUG oslo_concurrency.lockutils [req-8cbc15bb-b20e-4dbf-9fc4-df56eb7bea62 req-3f3b5c66-b2ca-4288-91a4-5c828bdf4efd service nova] Acquiring lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1247.152015] env[62383]: DEBUG oslo_concurrency.lockutils [req-8cbc15bb-b20e-4dbf-9fc4-df56eb7bea62 req-3f3b5c66-b2ca-4288-91a4-5c828bdf4efd service nova] Acquired lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.152182] env[62383]: DEBUG nova.network.neutron [req-8cbc15bb-b20e-4dbf-9fc4-df56eb7bea62 req-3f3b5c66-b2ca-4288-91a4-5c828bdf4efd service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Refreshing network info cache for port f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1247.316613] env[62383]: DEBUG nova.compute.manager [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Starting instance... 
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1247.617828] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Preparing fetch location {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1247.618105] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Fetch image to [datastore2] OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739/OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739.vmdk {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1247.618293] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Downloading stream optimized image c29d8c40-d10a-482c-893e-d9a6953f83ec to [datastore2] OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739/OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739.vmdk on the data store datastore2 as vApp {{(pid=62383) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1247.618463] env[62383]: DEBUG nova.virt.vmwareapi.images [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Downloading image file data c29d8c40-d10a-482c-893e-d9a6953f83ec to the ESX as VM named 'OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739' {{(pid=62383) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1247.695541] env[62383]: DEBUG oslo_vmware.rw_handles [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1247.695541] env[62383]: value = "resgroup-9" [ 1247.695541] env[62383]: _type = "ResourcePool" [ 1247.695541] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1247.695894] env[62383]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-678874d9-4c38-42f8-be1e-305075d724b2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.719473] env[62383]: DEBUG oslo_vmware.rw_handles [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lease: (returnval){ [ 1247.719473] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4f24f-71ee-c4d7-8bff-f33d15593e33" [ 1247.719473] env[62383]: _type = "HttpNfcLease" [ 1247.719473] env[62383]: } obtained for vApp import into resource pool (val){ [ 1247.719473] env[62383]: value = "resgroup-9" [ 1247.719473] env[62383]: _type = "ResourcePool" [ 1247.719473] env[62383]: }. 
{{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1247.719760] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the lease: (returnval){ [ 1247.719760] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4f24f-71ee-c4d7-8bff-f33d15593e33" [ 1247.719760] env[62383]: _type = "HttpNfcLease" [ 1247.719760] env[62383]: } to be ready. {{(pid=62383) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1247.726078] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1247.726078] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4f24f-71ee-c4d7-8bff-f33d15593e33" [ 1247.726078] env[62383]: _type = "HttpNfcLease" [ 1247.726078] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1247.840586] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.840850] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.842382] env[62383]: INFO nova.compute.claims [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1247.866134] env[62383]: DEBUG nova.compute.manager [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1247.887879] env[62383]: DEBUG nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1247.888125] env[62383]: DEBUG nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1247.888281] env[62383]: DEBUG nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1247.888458] env[62383]: DEBUG nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1247.888604] env[62383]: DEBUG nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1247.888756] env[62383]: DEBUG nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1247.888966] env[62383]: DEBUG nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1247.889284] env[62383]: DEBUG nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1247.889476] env[62383]: DEBUG 
nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1247.889646] env[62383]: DEBUG nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1247.889822] env[62383]: DEBUG nova.virt.hardware [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1247.890778] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e412e4-c2a1-4fef-bf88-0311da388a42 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.893971] env[62383]: DEBUG nova.network.neutron [req-8cbc15bb-b20e-4dbf-9fc4-df56eb7bea62 req-3f3b5c66-b2ca-4288-91a4-5c828bdf4efd service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updated VIF entry in instance network info cache for port f1d1962c-c0f2-4e5b-9885-b71019f7e792. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1247.895119] env[62383]: DEBUG nova.network.neutron [req-8cbc15bb-b20e-4dbf-9fc4-df56eb7bea62 req-3f3b5c66-b2ca-4288-91a4-5c828bdf4efd service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating instance_info_cache with network_info: [{"id": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "address": "fa:16:3e:b4:77:63", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d1962c-c0", "ovs_interfaceid": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.902339] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4259ef78-9bc4-4c2d-b0d2-80d4a3a3bbc7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.010076] 
env[62383]: INFO nova.compute.manager [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Rebuilding instance [ 1248.046028] env[62383]: DEBUG nova.compute.manager [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1248.046900] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e22f622f-2713-42e0-bedc-06e816c312b9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.108186] env[62383]: DEBUG nova.network.neutron [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Successfully updated port: b4fa95e4-d3ed-4f7a-b546-c1db145d291a {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1248.227553] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1248.227553] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4f24f-71ee-c4d7-8bff-f33d15593e33" [ 1248.227553] env[62383]: _type = "HttpNfcLease" [ 1248.227553] env[62383]: } is initializing. {{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1248.397890] env[62383]: DEBUG oslo_concurrency.lockutils [req-8cbc15bb-b20e-4dbf-9fc4-df56eb7bea62 req-3f3b5c66-b2ca-4288-91a4-5c828bdf4efd service nova] Releasing lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.610628] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquiring lock "refresh_cache-8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.610628] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquired lock "refresh_cache-8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.610628] env[62383]: DEBUG nova.network.neutron [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1248.728546] env[62383]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1248.728546] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4f24f-71ee-c4d7-8bff-f33d15593e33" [ 1248.728546] env[62383]: _type = "HttpNfcLease" [ 1248.728546] env[62383]: } is ready. 
{{(pid=62383) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1248.728857] env[62383]: DEBUG oslo_vmware.rw_handles [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1248.728857] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4f24f-71ee-c4d7-8bff-f33d15593e33" [ 1248.728857] env[62383]: _type = "HttpNfcLease" [ 1248.728857] env[62383]: }. {{(pid=62383) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1248.729589] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fed46dc-10fa-4c87-a43c-7a10bdf761f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.736889] env[62383]: DEBUG oslo_vmware.rw_handles [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b5834b-fee0-b28c-b67c-a2d94ccbe8fc/disk-0.vmdk from lease info. {{(pid=62383) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1248.737103] env[62383]: DEBUG oslo_vmware.rw_handles [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b5834b-fee0-b28c-b67c-a2d94ccbe8fc/disk-0.vmdk. 
{{(pid=62383) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1248.800515] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a24d2eef-b23e-449e-aba3-8d2ddd8c628a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.917332] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f314359-dff3-48fe-bf5d-9e4f0cac075b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.925256] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff28f3c2-b776-448a-9b2c-c80ebea80038 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.960831] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a5ec43-c932-4582-9e99-bf12d2ef8316 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.968871] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95f4c52-d81b-4085-ab6a-bff0d0a84be8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.983120] env[62383]: DEBUG nova.compute.provider_tree [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1249.061017] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1249.061357] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24d09569-152c-4f33-9e39-e02417b29400 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.068210] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for the task: (returnval){ [ 1249.068210] env[62383]: value = "task-2452590" [ 1249.068210] env[62383]: _type = "Task" [ 1249.068210] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.078660] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452590, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.145061] env[62383]: DEBUG nova.network.neutron [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1249.183298] env[62383]: DEBUG nova.compute.manager [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Received event network-vif-plugged-b4fa95e4-d3ed-4f7a-b546-c1db145d291a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1249.183782] env[62383]: DEBUG oslo_concurrency.lockutils [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] Acquiring lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.183782] env[62383]: DEBUG oslo_concurrency.lockutils [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] Lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.183990] env[62383]: DEBUG oslo_concurrency.lockutils [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] Lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.183990] env[62383]: DEBUG nova.compute.manager [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] No waiting events found dispatching network-vif-plugged-b4fa95e4-d3ed-4f7a-b546-c1db145d291a {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1249.184177] env[62383]: WARNING nova.compute.manager [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Received unexpected event network-vif-plugged-b4fa95e4-d3ed-4f7a-b546-c1db145d291a for instance with vm_state building and task_state spawning. [ 1249.184474] env[62383]: DEBUG nova.compute.manager [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Received event network-changed-b4fa95e4-d3ed-4f7a-b546-c1db145d291a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1249.184474] env[62383]: DEBUG nova.compute.manager [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Refreshing instance network info cache due to event network-changed-b4fa95e4-d3ed-4f7a-b546-c1db145d291a. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1249.184810] env[62383]: DEBUG oslo_concurrency.lockutils [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] Acquiring lock "refresh_cache-8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1249.321253] env[62383]: DEBUG nova.network.neutron [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Updating instance_info_cache with network_info: [{"id": "b4fa95e4-d3ed-4f7a-b546-c1db145d291a", "address": "fa:16:3e:0c:fd:dc", "network": {"id": "599d885b-65a0-4ba1-b525-7df61b079b95", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-170756063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a329d49499d41a2b8f0abc05e7bb0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4fa95e4-d3", "ovs_interfaceid": "b4fa95e4-d3ed-4f7a-b546-c1db145d291a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.487544] env[62383]: DEBUG nova.scheduler.client.report [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1249.579848] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452590, 'name': PowerOffVM_Task, 'duration_secs': 0.183913} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.580228] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1249.581100] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1249.582743] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bea2cf9a-67cb-455f-bd6b-ddd835eb2905 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.589578] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for the task: (returnval){ [ 1249.589578] env[62383]: value = "task-2452591" [ 1249.589578] env[62383]: _type = "Task" [ 1249.589578] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.599064] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452591, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.823812] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Releasing lock "refresh_cache-8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1249.824260] env[62383]: DEBUG nova.compute.manager [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Instance network_info: |[{"id": "b4fa95e4-d3ed-4f7a-b546-c1db145d291a", "address": "fa:16:3e:0c:fd:dc", "network": {"id": "599d885b-65a0-4ba1-b525-7df61b079b95", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-170756063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a329d49499d41a2b8f0abc05e7bb0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4fa95e4-d3", "ovs_interfaceid": "b4fa95e4-d3ed-4f7a-b546-c1db145d291a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1249.824577] env[62383]: DEBUG oslo_concurrency.lockutils [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] Acquired lock "refresh_cache-8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.824885] env[62383]: DEBUG nova.network.neutron [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Refreshing network info cache for port b4fa95e4-d3ed-4f7a-b546-c1db145d291a {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1249.826220] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:fd:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4fa95e4-d3ed-4f7a-b546-c1db145d291a', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1249.834176] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 
tempest-AttachInterfacesV270Test-725423304-project-member] Creating folder: Project (8a329d49499d41a2b8f0abc05e7bb0a1). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1249.837470] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16ae7664-5e77-44b0-aef6-46ebfa568906 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.849597] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Created folder: Project (8a329d49499d41a2b8f0abc05e7bb0a1) in parent group-v496304. [ 1249.849756] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Creating folder: Instances. Parent ref: group-v496624. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1249.850046] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c31f8c3-1ea4-45a4-a864-f736e3975eb9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.860157] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Created folder: Instances in parent group-v496624. [ 1249.860437] env[62383]: DEBUG oslo.service.loopingcall [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1249.860662] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1249.860961] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23aa4c4c-59c1-4496-866b-add70c40a4fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.883872] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1249.883872] env[62383]: value = "task-2452594" [ 1249.883872] env[62383]: _type = "Task" [ 1249.883872] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.884763] env[62383]: DEBUG oslo_vmware.rw_handles [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Completed reading data from the image iterator. {{(pid=62383) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1249.884999] env[62383]: DEBUG oslo_vmware.rw_handles [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b5834b-fee0-b28c-b67c-a2d94ccbe8fc/disk-0.vmdk. 
{{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1249.885942] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0164d7b3-3b18-4a6a-a474-636d438e3ac2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.894380] env[62383]: DEBUG oslo_vmware.rw_handles [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b5834b-fee0-b28c-b67c-a2d94ccbe8fc/disk-0.vmdk is in state: ready. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1249.894546] env[62383]: DEBUG oslo_vmware.rw_handles [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b5834b-fee0-b28c-b67c-a2d94ccbe8fc/disk-0.vmdk. {{(pid=62383) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1249.897689] env[62383]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-cb1724d0-3a37-45bd-9b86-2efcf7f4ad08 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.899016] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452594, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.992776] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.152s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.993448] env[62383]: DEBUG nova.compute.manager [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1250.101752] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] VM already powered off {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1250.101971] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Volume detach. 
Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1250.102143] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496615', 'volume_id': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'name': 'volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '991d6eb0-94e7-4d7c-bd85-3c8ef28daa49', 'attached_at': '', 'detached_at': '', 'volume_id': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'serial': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1250.102976] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24342e57-2063-4719-b61b-87affca113d9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.121759] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7205ae-df12-4386-980d-10c4290c2569 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.128404] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a80afa6-7f91-415c-905f-9be3b86eff9f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.146217] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cb4a1a-2303-4caf-9e78-a564112a7457 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.160957] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] The volume has not been displaced from its original location: [datastore2] volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07/volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1250.166190] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1250.166510] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca4869e0-c6f9-4119-96cb-4c51f07b52f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.183938] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for the task: (returnval){ [ 1250.183938] env[62383]: value = "task-2452595" [ 1250.183938] env[62383]: _type = "Task" [ 1250.183938] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.191811] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452595, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.372342] env[62383]: DEBUG oslo_vmware.rw_handles [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b5834b-fee0-b28c-b67c-a2d94ccbe8fc/disk-0.vmdk. {{(pid=62383) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1250.372564] env[62383]: INFO nova.virt.vmwareapi.images [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Downloaded image file data c29d8c40-d10a-482c-893e-d9a6953f83ec [ 1250.373525] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1027d1ff-9914-4b5e-94f3-b0e059248a5d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.391975] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fdbd5eea-5758-422e-9548-a37e4b9da50c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.399311] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452594, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.419049] env[62383]: INFO nova.virt.vmwareapi.images [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] The imported VM was unregistered [ 1250.421406] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Caching image {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1250.421640] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Creating directory with path [datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1250.423981] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af7325f6-c9a0-415f-97f0-7b9adf3b9947 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.434135] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Created directory with path [datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1250.434324] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739/OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739.vmdk to [datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec/c29d8c40-d10a-482c-893e-d9a6953f83ec.vmdk. {{(pid=62383) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1250.434578] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-7aee1e7d-6d88-494f-a579-4d4a2ecf2d0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.443774] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1250.443774] env[62383]: value = "task-2452597" [ 1250.443774] env[62383]: _type = "Task" [ 1250.443774] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.453879] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452597, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.499023] env[62383]: DEBUG nova.compute.utils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1250.500579] env[62383]: DEBUG nova.compute.manager [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Allocating IP information in the background. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1250.500938] env[62383]: DEBUG nova.network.neutron [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1250.555402] env[62383]: DEBUG nova.policy [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e092981766f4f63adaa0cbbb78fff9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0f48fbe0a7c49cf866e39daf3b5cf3c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1250.596295] env[62383]: DEBUG nova.network.neutron [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Updated VIF entry in instance network info cache for port b4fa95e4-d3ed-4f7a-b546-c1db145d291a. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1250.596648] env[62383]: DEBUG nova.network.neutron [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Updating instance_info_cache with network_info: [{"id": "b4fa95e4-d3ed-4f7a-b546-c1db145d291a", "address": "fa:16:3e:0c:fd:dc", "network": {"id": "599d885b-65a0-4ba1-b525-7df61b079b95", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-170756063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8a329d49499d41a2b8f0abc05e7bb0a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4fa95e4-d3", "ovs_interfaceid": "b4fa95e4-d3ed-4f7a-b546-c1db145d291a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.696254] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452595, 'name': ReconfigVM_Task, 'duration_secs': 0.287827} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.696537] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1250.701675] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e726484-dd1f-4fca-ae35-89531cba41a8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.720741] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for the task: (returnval){ [ 1250.720741] env[62383]: value = "task-2452598" [ 1250.720741] env[62383]: _type = "Task" [ 1250.720741] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.731637] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452598, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.886293] env[62383]: DEBUG nova.network.neutron [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Successfully created port: 7cbb97fd-c997-46f0-8174-980bf3cec122 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1250.900389] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452594, 'name': CreateVM_Task, 'duration_secs': 0.524908} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.900550] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1250.901310] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.901482] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.901840] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1250.902202] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53234dc2-cb22-479c-82c5-39709b38496b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.909403] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for the task: (returnval){ [ 1250.909403] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524baba2-cf6b-11a9-726a-2edecc0fc1b8" [ 1250.909403] env[62383]: _type = "Task" [ 1250.909403] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.920504] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524baba2-cf6b-11a9-726a-2edecc0fc1b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.957017] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452597, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.004055] env[62383]: DEBUG nova.compute.manager [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1251.100073] env[62383]: DEBUG oslo_concurrency.lockutils [req-4ec4c964-1cd4-4876-bc06-c91d6bc22e45 req-ee89baac-b7d7-4f29-8648-4ec990c46020 service nova] Releasing lock "refresh_cache-8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.232721] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452598, 'name': ReconfigVM_Task, 'duration_secs': 0.235022} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.233077] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496615', 'volume_id': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'name': 'volume-6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '991d6eb0-94e7-4d7c-bd85-3c8ef28daa49', 'attached_at': '', 'detached_at': '', 'volume_id': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07', 'serial': '6e38b46a-eb2c-4e2f-a981-9969c1d37f07'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1251.233418] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1251.234272] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cef2e2-8309-40e4-892d-7d4d909a17b7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.243381] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1251.243640] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with 
opID=oslo.vmware-62709275-1dd7-450d-9055-ab776e3a87f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.319136] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1251.319391] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1251.319602] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Deleting the datastore file [datastore2] 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1251.319972] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc002e85-2c07-41ac-a266-229149431cb1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.329449] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for the task: (returnval){ [ 1251.329449] env[62383]: value = "task-2452600" [ 1251.329449] env[62383]: _type = "Task" [ 1251.329449] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.340968] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452600, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.423316] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524baba2-cf6b-11a9-726a-2edecc0fc1b8, 'name': SearchDatastore_Task, 'duration_secs': 0.086677} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.423665] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.424015] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1251.424267] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1251.424419] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.424605] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1251.425036] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b11df31-d3cc-414f-8d59-614ae328b34e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.444536] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1251.444794] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1251.445651] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bea3d73-f547-4fd6-99bd-0f97dc25371b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.456830] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452597, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.458678] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for the task: (returnval){ [ 1251.458678] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52949e67-9b22-ec1d-92e7-225dd0722efd" [ 1251.458678] env[62383]: _type = "Task" [ 1251.458678] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.468706] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52949e67-9b22-ec1d-92e7-225dd0722efd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.842015] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452600, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.956860] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452597, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.972056] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52949e67-9b22-ec1d-92e7-225dd0722efd, 'name': SearchDatastore_Task, 'duration_secs': 0.081664} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.972588] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff4af8c5-dbf6-4014-b9aa-00094671b839 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.980383] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for the task: (returnval){ [ 1251.980383] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]526ecbd8-989e-e2d7-2ebc-61cc41ce9f8b" [ 1251.980383] env[62383]: _type = "Task" [ 1251.980383] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.990343] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]526ecbd8-989e-e2d7-2ebc-61cc41ce9f8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.021082] env[62383]: DEBUG nova.compute.manager [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Start spawning the instance on the hypervisor. {{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1252.080912] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1252.081082] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1252.081264] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1252.081453] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 
tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1252.081604] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1252.081758] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1252.081982] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1252.082162] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1252.082333] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1252.082501] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1252.082673] env[62383]: DEBUG nova.virt.hardware [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1252.083590] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da123370-331d-4300-ab45-7341463345e5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.094756] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f7e7d9-43bb-4507-b9ca-515bc24c6db2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.340349] env[62383]: DEBUG oslo_vmware.api [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Task: {'id': task-2452600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.934707} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.340637] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1252.340787] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1252.340963] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1252.402844] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1252.403241] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67a58745-b06e-4ab0-b678-652eef88913a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.415912] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec7c84b-a40e-4c88-976a-94568f160159 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.445120] env[62383]: ERROR nova.compute.manager [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Failed to detach volume 6e38b46a-eb2c-4e2f-a981-9969c1d37f07 from /dev/sda: nova.exception.InstanceNotFound: Instance 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 could not be found. 
[ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Traceback (most recent call last): [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self.driver.rebuild(**kwargs) [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] raise NotImplementedError() [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] NotImplementedError [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] During handling of the above exception, another exception occurred: [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Traceback (most recent call last): [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self.driver.detach_volume(context, old_connection_info, [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] return self._volumeops.detach_volume(connection_info, instance) [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self._detach_volume_vmdk(connection_info, instance) [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] stable_ref.fetch_moref(session) [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] 
nova.exception.InstanceNotFound: Instance 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 could not be found. [ 1252.445120] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] [ 1252.456890] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452597, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.492429] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]526ecbd8-989e-e2d7-2ebc-61cc41ce9f8b, 'name': SearchDatastore_Task, 'duration_secs': 0.07664} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.492754] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1252.493030] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8/8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1252.493304] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da0ac2f3-c857-4284-9a8b-04fb3be7d6fe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.501986] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for the task: (returnval){ [ 1252.501986] env[62383]: value = "task-2452601" [ 1252.501986] env[62383]: _type = "Task" [ 1252.501986] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.512052] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452601, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.518506] env[62383]: DEBUG nova.compute.manager [req-f91a44cd-e090-4aae-ae9b-d00455a6d98a req-6422db00-9edd-4eb1-a3bb-93cb241975b3 service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Received event network-vif-plugged-7cbb97fd-c997-46f0-8174-980bf3cec122 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1252.518732] env[62383]: DEBUG oslo_concurrency.lockutils [req-f91a44cd-e090-4aae-ae9b-d00455a6d98a req-6422db00-9edd-4eb1-a3bb-93cb241975b3 service nova] Acquiring lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.518943] env[62383]: DEBUG oslo_concurrency.lockutils [req-f91a44cd-e090-4aae-ae9b-d00455a6d98a req-6422db00-9edd-4eb1-a3bb-93cb241975b3 service nova] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.519128] env[62383]: DEBUG oslo_concurrency.lockutils [req-f91a44cd-e090-4aae-ae9b-d00455a6d98a req-6422db00-9edd-4eb1-a3bb-93cb241975b3 service nova] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.519299] env[62383]: DEBUG nova.compute.manager [req-f91a44cd-e090-4aae-ae9b-d00455a6d98a req-6422db00-9edd-4eb1-a3bb-93cb241975b3 service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] No waiting events found dispatching network-vif-plugged-7cbb97fd-c997-46f0-8174-980bf3cec122 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1252.519465] env[62383]: WARNING nova.compute.manager [req-f91a44cd-e090-4aae-ae9b-d00455a6d98a req-6422db00-9edd-4eb1-a3bb-93cb241975b3 service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Received unexpected event network-vif-plugged-7cbb97fd-c997-46f0-8174-980bf3cec122 for instance with vm_state building and task_state spawning. [ 1252.582164] env[62383]: DEBUG nova.compute.utils [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Build of instance 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 aborted: Failed to rebuild volume backed instance. {{(pid=62383) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1252.584627] env[62383]: ERROR nova.compute.manager [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 aborted: Failed to rebuild volume backed instance. 
[ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Traceback (most recent call last): [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self.driver.rebuild(**kwargs) [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] raise NotImplementedError() [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] NotImplementedError [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] During handling of the above exception, another exception occurred: [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Traceback (most recent call last): [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self._detach_root_volume(context, instance, root_bdm) [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] with excutils.save_and_reraise_exception(): [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self.force_reraise() [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] raise self.value [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self.driver.detach_volume(context, old_connection_info, [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] return self._volumeops.detach_volume(connection_info, instance) [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self._detach_volume_vmdk(connection_info, instance) [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] stable_ref.fetch_moref(session) [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] nova.exception.InstanceNotFound: Instance 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 could not be found. [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] During handling of the above exception, another exception occurred: [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Traceback (most recent call last): [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 11382, in _error_out_instance_on_exception [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] yield [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1252.584627] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self._do_rebuild_instance_with_claim( [ 1252.586355] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1252.586355] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self._do_rebuild_instance( [ 1252.586355] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1252.586355] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] self._rebuild_default_impl(**kwargs) [ 1252.586355] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1252.586355] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] 
self._rebuild_volume_backed_instance( [ 1252.586355] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1252.586355] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] raise exception.BuildAbortException( [ 1252.586355] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] nova.exception.BuildAbortException: Build of instance 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 aborted: Failed to rebuild volume backed instance. [ 1252.586355] env[62383]: ERROR nova.compute.manager [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] [ 1252.632869] env[62383]: DEBUG nova.network.neutron [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Successfully updated port: 7cbb97fd-c997-46f0-8174-980bf3cec122 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1252.961067] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452597, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.300964} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.961067] env[62383]: INFO nova.virt.vmwareapi.ds_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739/OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739.vmdk to [datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec/c29d8c40-d10a-482c-893e-d9a6953f83ec.vmdk. [ 1252.961067] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Cleaning up location [datastore2] OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1252.961583] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_0f3efa85-2964-44fc-bfee-be5424fe3739 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1252.961583] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f0711f9-cab9-4692-a20a-90c30db77882 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.970181] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1252.970181] env[62383]: value = "task-2452602" [ 1252.970181] env[62383]: _type = "Task" [ 1252.970181] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.980232] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452602, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.012749] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452601, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.135756] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.136062] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquired lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.136062] env[62383]: DEBUG nova.network.neutron [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1253.480483] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452602, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120842} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.480785] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1253.480996] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec/c29d8c40-d10a-482c-893e-d9a6953f83ec.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.481321] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec/c29d8c40-d10a-482c-893e-d9a6953f83ec.vmdk to [datastore2] b8e512cd-5eb9-423c-9447-833e34909bc3/b8e512cd-5eb9-423c-9447-833e34909bc3.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1253.481601] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d49761f4-7c51-4539-b5e2-8cc930bf6d16 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.488066] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1253.488066] env[62383]: value = "task-2452603" [ 1253.488066] env[62383]: _type = "Task" [ 1253.488066] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.495799] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452603, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.511063] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585479} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.511341] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8/8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1253.511592] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1253.511854] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bbee85ba-935c-40a9-9763-dfcd6f727980 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.517836] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for the task: (returnval){ [ 1253.517836] env[62383]: value = "task-2452604" [ 1253.517836] env[62383]: _type = "Task" [ 1253.517836] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.525663] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452604, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.669645] env[62383]: DEBUG nova.network.neutron [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1253.807952] env[62383]: DEBUG nova.network.neutron [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updating instance_info_cache with network_info: [{"id": "7cbb97fd-c997-46f0-8174-980bf3cec122", "address": "fa:16:3e:47:0a:0f", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbb97fd-c9", "ovs_interfaceid": "7cbb97fd-c997-46f0-8174-980bf3cec122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.997524] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452603, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.026310] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452604, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068316} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.026593] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1254.027391] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f0626e-1841-4099-8d36-16ed03c96ee8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.050899] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8/8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1254.051262] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2dabf45-4b50-4a82-8d65-53f015f59f48 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.073400] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for the task: (returnval){ [ 1254.073400] env[62383]: value = "task-2452605" [ 1254.073400] env[62383]: _type = "Task" [ 1254.073400] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.082242] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452605, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.311033] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Releasing lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.311498] env[62383]: DEBUG nova.compute.manager [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Instance network_info: |[{"id": "7cbb97fd-c997-46f0-8174-980bf3cec122", "address": "fa:16:3e:47:0a:0f", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbb97fd-c9", "ovs_interfaceid": "7cbb97fd-c997-46f0-8174-980bf3cec122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1254.311901] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:0a:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7cbb97fd-c997-46f0-8174-980bf3cec122', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1254.320678] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Creating folder: Project (e0f48fbe0a7c49cf866e39daf3b5cf3c). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1254.320678] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9af9012-9b7b-45c2-94ef-8ca9514d5616 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.333683] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Created folder: Project (e0f48fbe0a7c49cf866e39daf3b5cf3c) in parent group-v496304. [ 1254.333898] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Creating folder: Instances. Parent ref: group-v496627. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1254.334295] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82caa457-d6d1-4313-90e5-32f0054c0ea0 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.347291] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Created folder: Instances in parent group-v496627. [ 1254.347700] env[62383]: DEBUG oslo.service.loopingcall [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1254.347751] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1254.348146] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a75fe66-325d-425d-b43d-45cc3efc9b07 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.369909] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1254.369909] env[62383]: value = "task-2452608" [ 1254.369909] env[62383]: _type = "Task" [ 1254.369909] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.379278] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452608, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.499721] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452603, 'name': CopyVirtualDisk_Task} progress is 26%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.548789] env[62383]: DEBUG nova.compute.manager [req-79bf9305-4e98-466a-b901-b92996e85082 req-9158f18b-fb86-4087-8ce8-0e8b0a4a9362 service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Received event network-changed-7cbb97fd-c997-46f0-8174-980bf3cec122 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1254.549032] env[62383]: DEBUG nova.compute.manager [req-79bf9305-4e98-466a-b901-b92996e85082 req-9158f18b-fb86-4087-8ce8-0e8b0a4a9362 service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Refreshing instance network info cache due to event network-changed-7cbb97fd-c997-46f0-8174-980bf3cec122. {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1254.549416] env[62383]: DEBUG oslo_concurrency.lockutils [req-79bf9305-4e98-466a-b901-b92996e85082 req-9158f18b-fb86-4087-8ce8-0e8b0a4a9362 service nova] Acquiring lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.549615] env[62383]: DEBUG oslo_concurrency.lockutils [req-79bf9305-4e98-466a-b901-b92996e85082 req-9158f18b-fb86-4087-8ce8-0e8b0a4a9362 service nova] Acquired lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.549800] env[62383]: DEBUG nova.network.neutron [req-79bf9305-4e98-466a-b901-b92996e85082 req-9158f18b-fb86-4087-8ce8-0e8b0a4a9362 service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Refreshing network info cache for port 7cbb97fd-c997-46f0-8174-980bf3cec122 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1254.586514] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.600741] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.601033] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.672809] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd85df0d-eac3-4ac7-a141-1ed122eaa005 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.683334] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aecf972-108e-4f9f-bc45-5f50029cb933 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.717829] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872e3f9e-3ee3-4b80-8796-680d595f4a15 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.727589] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e0b718-88cd-430d-82f4-f4933ad3c187 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.743363] env[62383]: DEBUG nova.compute.provider_tree [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1254.881869] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452608, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.004110] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452603, 'name': CopyVirtualDisk_Task} progress is 49%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.007766] env[62383]: DEBUG oslo_concurrency.lockutils [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Acquiring lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.008052] env[62383]: DEBUG oslo_concurrency.lockutils [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.008290] env[62383]: DEBUG oslo_concurrency.lockutils [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Acquiring lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.008481] env[62383]: DEBUG oslo_concurrency.lockutils [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.008676] env[62383]: DEBUG oslo_concurrency.lockutils [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.011541] env[62383]: INFO nova.compute.manager [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Terminating instance [ 1255.089174] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.246893] env[62383]: DEBUG nova.scheduler.client.report [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1255.285764] env[62383]: DEBUG nova.network.neutron [req-79bf9305-4e98-466a-b901-b92996e85082 req-9158f18b-fb86-4087-8ce8-0e8b0a4a9362 service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updated VIF entry in instance network info cache for port 7cbb97fd-c997-46f0-8174-980bf3cec122. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1255.286154] env[62383]: DEBUG nova.network.neutron [req-79bf9305-4e98-466a-b901-b92996e85082 req-9158f18b-fb86-4087-8ce8-0e8b0a4a9362 service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updating instance_info_cache with network_info: [{"id": "7cbb97fd-c997-46f0-8174-980bf3cec122", "address": "fa:16:3e:47:0a:0f", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbb97fd-c9", "ovs_interfaceid": "7cbb97fd-c997-46f0-8174-980bf3cec122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.383156] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452608, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.506940] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452603, 'name': CopyVirtualDisk_Task} progress is 71%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.516215] env[62383]: DEBUG nova.compute.manager [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1255.516603] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-179ff066-0745-45e0-a824-ae089f9963f1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.527739] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06d88aa-6064-4969-a5d1-07c8099fe0d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.559589] env[62383]: WARNING nova.virt.vmwareapi.driver [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 could not be found. [ 1255.559950] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1255.560332] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9fa1a273-aaa7-4775-a836-b79431d9eaae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.570496] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfd7618-033b-4ae3-9a5c-0c419786b3dc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.592310] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.603199] env[62383]: WARNING nova.virt.vmwareapi.vmops [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 could not be found. 
[ 1255.603466] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1255.603621] env[62383]: INFO nova.compute.manager [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Took 0.09 seconds to destroy the instance on the hypervisor. [ 1255.603873] env[62383]: DEBUG oslo.service.loopingcall [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1255.604157] env[62383]: DEBUG nova.compute.manager [-] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1255.604270] env[62383]: DEBUG nova.network.neutron [-] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1255.752453] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.151s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.752685] env[62383]: INFO nova.compute.manager [None req-d48bbe14-e8d2-478d-9f4e-7bab97fc56e0 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Successfully reverted task state from rebuilding on failure for instance. [ 1255.788756] env[62383]: DEBUG oslo_concurrency.lockutils [req-79bf9305-4e98-466a-b901-b92996e85082 req-9158f18b-fb86-4087-8ce8-0e8b0a4a9362 service nova] Releasing lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1255.883140] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452608, 'name': CreateVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.002941] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452603, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.094516] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.385504] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452608, 'name': CreateVM_Task, 'duration_secs': 1.886358} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.385741] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1256.387819] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.387819] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.387819] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1256.387819] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1461ab9f-edab-44b0-b6a4-7fd525685af7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.391571] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1256.391571] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a80459-e1e1-a9fa-937b-e15d50e8fda9" [ 1256.391571] env[62383]: _type = "Task" [ 1256.391571] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.399179] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a80459-e1e1-a9fa-937b-e15d50e8fda9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.501474] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452603, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.670324} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.501803] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c29d8c40-d10a-482c-893e-d9a6953f83ec/c29d8c40-d10a-482c-893e-d9a6953f83ec.vmdk to [datastore2] b8e512cd-5eb9-423c-9447-833e34909bc3/b8e512cd-5eb9-423c-9447-833e34909bc3.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1256.502752] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275ad2a3-8f36-482c-bbb9-384621a1198c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.524839] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] b8e512cd-5eb9-423c-9447-833e34909bc3/b8e512cd-5eb9-423c-9447-833e34909bc3.vmdk or device None with type streamOptimized {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.524839] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac83114c-1acc-4643-95bd-b36da34dee6f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.544798] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1256.544798] env[62383]: value = "task-2452609" [ 1256.544798] env[62383]: _type = "Task" [ 1256.544798] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.552851] env[62383]: DEBUG nova.network.neutron [-] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.554115] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452609, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.577567] env[62383]: DEBUG nova.compute.manager [req-bc89ffac-e935-4ee8-9e7b-4b672418142e req-958d44c1-8504-40a0-a74a-f1f68bff2da6 service nova] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Received event network-vif-deleted-58a9319d-b343-4caf-904d-91af9410d121 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1256.594256] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452605, 'name': ReconfigVM_Task, 'duration_secs': 2.181823} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.594619] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Reconfigured VM instance instance-00000079 to attach disk [datastore2] 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8/8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1256.595296] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5accb09-55ee-42bb-9551-92c2ff45fea6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.601645] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for the task: (returnval){ [ 1256.601645] env[62383]: value = "task-2452610" [ 1256.601645] env[62383]: _type = "Task" [ 1256.601645] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.610347] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452610, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.903553] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a80459-e1e1-a9fa-937b-e15d50e8fda9, 'name': SearchDatastore_Task, 'duration_secs': 0.010379} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.903859] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1256.904103] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1256.904334] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.904480] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.904655] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1256.904936] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-749a5699-30e3-4b3c-b466-5239c80da178 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.912656] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1256.912851] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1256.913561] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8017c7bc-ac23-4876-91bd-99b7460b51d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.920206] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1256.920206] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524f4367-0cef-0b78-1f2d-dafedd7a411a" [ 1256.920206] env[62383]: _type = "Task" [ 1256.920206] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.925645] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524f4367-0cef-0b78-1f2d-dafedd7a411a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.055276] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452609, 'name': ReconfigVM_Task, 'duration_secs': 0.270347} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.055567] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Reconfigured VM instance instance-00000076 to attach disk [datastore2] b8e512cd-5eb9-423c-9447-833e34909bc3/b8e512cd-5eb9-423c-9447-833e34909bc3.vmdk or device None with type streamOptimized {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1257.056347] env[62383]: INFO nova.compute.manager [-] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Took 1.45 seconds to deallocate network for instance. [ 1257.056561] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81584ef4-654d-4f68-bbbd-a81fa820fe14 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.064251] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1257.064251] env[62383]: value = "task-2452611" [ 1257.064251] env[62383]: _type = "Task" [ 1257.064251] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.071595] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452611, 'name': Rename_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.111861] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452610, 'name': Rename_Task, 'duration_secs': 0.147683} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.112071] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.112265] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b65c5cc8-56ee-4a79-bd0a-d06126f4c763 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.117655] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for the task: (returnval){ [ 1257.117655] env[62383]: value = "task-2452612" [ 1257.117655] env[62383]: _type = "Task" [ 1257.117655] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.125021] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452612, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.427902] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524f4367-0cef-0b78-1f2d-dafedd7a411a, 'name': SearchDatastore_Task, 'duration_secs': 0.007923} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.428671] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07831267-b10d-464d-b0c5-f895faeeda97 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.433460] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1257.433460] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52c9ebf2-c566-d6ca-0dc1-0e00d0beadeb" [ 1257.433460] env[62383]: _type = "Task" [ 1257.433460] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.440754] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c9ebf2-c566-d6ca-0dc1-0e00d0beadeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.577608] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452611, 'name': Rename_Task, 'duration_secs': 0.147253} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.577608] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.577938] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e096486b-519c-4a04-9e67-34db366d2432 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.586172] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1257.586172] env[62383]: value = "task-2452613" [ 1257.586172] env[62383]: _type = "Task" [ 1257.586172] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.596913] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452613, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.613039] env[62383]: INFO nova.compute.manager [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Took 0.55 seconds to detach 1 volumes for instance. [ 1257.617581] env[62383]: DEBUG nova.compute.manager [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Deleting volume: 6e38b46a-eb2c-4e2f-a981-9969c1d37f07 {{(pid=62383) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1257.631208] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452612, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.943287] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52c9ebf2-c566-d6ca-0dc1-0e00d0beadeb, 'name': SearchDatastore_Task, 'duration_secs': 0.010155} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.943554] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.943826] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] e05e6a48-6992-41f2-a937-2f8e022cf1f5/e05e6a48-6992-41f2-a937-2f8e022cf1f5.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1257.944138] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3714848-6b14-4b01-ae06-8d2a336c0c3c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.949983] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1257.949983] env[62383]: value = "task-2452615" [ 1257.949983] env[62383]: _type = "Task" [ 1257.949983] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.957300] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452615, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.096800] env[62383]: DEBUG oslo_vmware.api [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452613, 'name': PowerOnVM_Task, 'duration_secs': 0.487272} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.097083] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1258.129566] env[62383]: DEBUG oslo_vmware.api [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452612, 'name': PowerOnVM_Task, 'duration_secs': 0.512375} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.129876] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1258.130085] env[62383]: INFO nova.compute.manager [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Took 10.26 seconds to spawn the instance on the hypervisor. [ 1258.130391] env[62383]: DEBUG nova.compute.manager [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1258.131249] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61d8fc9-537b-4b05-b06e-66746300a207 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.193579] env[62383]: DEBUG nova.compute.manager [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1258.194559] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e69a6c-3e85-46f9-95ee-4d8d66a81a26 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.200522] env[62383]: DEBUG oslo_concurrency.lockutils [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.200779] env[62383]: DEBUG oslo_concurrency.lockutils [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.200996] env[62383]: DEBUG nova.objects.instance [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lazy-loading 'resources' on Instance uuid 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1258.460543] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452615, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.649134] env[62383]: INFO nova.compute.manager [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Took 18.13 seconds to build instance. [ 1258.718653] env[62383]: DEBUG oslo_concurrency.lockutils [None req-ea01779f-0784-48f3-a07c-04c16f1b5c09 tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.228s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.792724] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981cf856-b0ce-4c55-80ff-111a8df10b23 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.805037] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a86942-eaa7-4876-b8d9-5594ac8d564d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.840696] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433370ad-cc40-4774-8b4c-d2da345c817b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.850227] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29da1b25-0fb8-4ae3-87c6-8942bacbb665 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.867031] env[62383]: DEBUG nova.compute.provider_tree [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.965138] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452615, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.987872} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.965427] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] e05e6a48-6992-41f2-a937-2f8e022cf1f5/e05e6a48-6992-41f2-a937-2f8e022cf1f5.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1258.965652] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1258.965933] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6e004b8-162c-46d0-953b-069cc1e87803 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.973904] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1258.973904] env[62383]: value = "task-2452616" [ 1258.973904] env[62383]: _type = "Task" [ 1258.973904] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.982574] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452616, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.151211] env[62383]: DEBUG oslo_concurrency.lockutils [None req-303b124e-b069-4816-af5e-0a273ad7864d tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.638s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1259.163851] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "ebc0fa37-eb1a-421c-a421-cd990c6b84dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.164100] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "ebc0fa37-eb1a-421c-a421-cd990c6b84dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.370186] env[62383]: DEBUG nova.scheduler.client.report [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1259.447769] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "a743070f-cdda-4bf3-a15a-8199e9d57e3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.448015] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "a743070f-cdda-4bf3-a15a-8199e9d57e3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.460986] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5c6ffb51-21d5-44bb-b13d-a6da98e3d4f5 tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquiring lock "interface-8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8-None" by 
"nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.461241] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5c6ffb51-21d5-44bb-b13d-a6da98e3d4f5 tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "interface-8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.461527] env[62383]: DEBUG nova.objects.instance [None req-5c6ffb51-21d5-44bb-b13d-a6da98e3d4f5 tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lazy-loading 'flavor' on Instance uuid 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.484558] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452616, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.251836} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.484802] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1259.485615] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9a3c91-5b1a-4122-93d2-cbf231a71786 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.506940] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] e05e6a48-6992-41f2-a937-2f8e022cf1f5/e05e6a48-6992-41f2-a937-2f8e022cf1f5.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1259.507204] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49b51b30-02d5-4dc8-86f5-828bee45586d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.526671] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1259.526671] env[62383]: value = "task-2452617" [ 1259.526671] env[62383]: _type = "Task" [ 1259.526671] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.536037] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452617, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.666512] env[62383]: DEBUG nova.compute.manager [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1259.874857] env[62383]: DEBUG oslo_concurrency.lockutils [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.674s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1259.951058] env[62383]: DEBUG nova.compute.manager [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1259.969260] env[62383]: DEBUG nova.objects.instance [None req-5c6ffb51-21d5-44bb-b13d-a6da98e3d4f5 tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lazy-loading 'pci_requests' on Instance uuid 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1260.036656] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452617, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.187776] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.188166] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.189836] env[62383]: INFO nova.compute.claims [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1260.394798] env[62383]: DEBUG oslo_concurrency.lockutils [None req-910ff021-40a6-425d-8dbe-b1329545baf9 tempest-ServerActionsV293TestJSON-491372817 tempest-ServerActionsV293TestJSON-491372817-project-member] Lock "991d6eb0-94e7-4d7c-bd85-3c8ef28daa49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.387s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.469279] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.472014] env[62383]: DEBUG nova.objects.base [None req-5c6ffb51-21d5-44bb-b13d-a6da98e3d4f5 tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Object Instance<8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8> lazy-loaded attributes: flavor,pci_requests {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1260.472234] env[62383]: DEBUG nova.network.neutron [None req-5c6ffb51-21d5-44bb-b13d-a6da98e3d4f5 tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1260.537981] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452617, 'name': ReconfigVM_Task, 'duration_secs': 0.741082} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.538278] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Reconfigured VM instance instance-0000007a to attach disk [datastore2] e05e6a48-6992-41f2-a937-2f8e022cf1f5/e05e6a48-6992-41f2-a937-2f8e022cf1f5.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1260.538913] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67c6f2a5-92dd-4dd8-8f9e-ed233e63958c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.546803] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1260.546803] env[62383]: value = "task-2452618" [ 1260.546803] env[62383]: _type = "Task" [ 1260.546803] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.558034] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452618, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.559402] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5c6ffb51-21d5-44bb-b13d-a6da98e3d4f5 tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "interface-8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.098s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.057070] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452618, 'name': Rename_Task, 'duration_secs': 0.175349} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.057348] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1261.057587] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c574f37b-ac63-41d7-9e2e-84f3ba580082 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.063751] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1261.063751] env[62383]: value = "task-2452619" [ 1261.063751] env[62383]: _type = "Task" [ 1261.063751] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.071511] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452619, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.280729] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c99d35-bccd-4ffd-87d7-99adaaf3de65 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.288352] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5944a1a6-a5f6-4968-b38a-051b532021d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.319307] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d4d31a-b7c4-4878-be13-13431f4cf588 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.326494] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6c2991-3ca3-4bea-8b44-5cbcd0582136 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.339721] env[62383]: DEBUG nova.compute.provider_tree [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1261.577627] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452619, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.842701] env[62383]: DEBUG nova.scheduler.client.report [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1262.079607] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452619, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.347895] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.160s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.426679] env[62383]: DEBUG nova.compute.manager [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1262.426679] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.883s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.426679] env[62383]: INFO nova.compute.claims [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1262.528206] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquiring lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.528577] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.528853] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquiring lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1262.529134] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1262.529376] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1262.531822] env[62383]: INFO nova.compute.manager [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Terminating instance [ 1262.579154] env[62383]: DEBUG oslo_vmware.api [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 
tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452619, 'name': PowerOnVM_Task, 'duration_secs': 1.02007} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.579515] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1262.579676] env[62383]: INFO nova.compute.manager [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Took 10.56 seconds to spawn the instance on the hypervisor. [ 1262.579888] env[62383]: DEBUG nova.compute.manager [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1262.580741] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1bded65-76fe-4325-94d1-c300079e3403 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.859867] env[62383]: DEBUG nova.compute.utils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1262.864290] env[62383]: DEBUG nova.compute.manager [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Not allocating networking since 'none' was specified. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1263.035711] env[62383]: DEBUG nova.compute.manager [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1263.036064] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1263.036926] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1bdf7a-b5a0-40d3-9ab9-6a4d20b70f9c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.045373] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1263.045632] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ccca5447-e9ed-4fa5-bebe-8a8f65b969ab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.052085] env[62383]: DEBUG oslo_vmware.api [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for the task: (returnval){ [ 1263.052085] env[62383]: value = "task-2452620" [ 1263.052085] env[62383]: _type = "Task" [ 1263.052085] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.059544] env[62383]: DEBUG oslo_vmware.api [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452620, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.101589] env[62383]: INFO nova.compute.manager [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Took 15.28 seconds to build instance. [ 1263.365192] env[62383]: DEBUG nova.compute.manager [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Start building block device mappings for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1263.452647] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ba365d-cf6f-4241-8436-ccb7dd96157f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.460235] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84717cf-c0a1-4b3e-acd1-424d8a9f05df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.491511] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cc5612-b20c-4799-99ae-7fed26d4b6d2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.498532] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760a1fed-dffc-47d3-9e4b-b9bc2bf78251 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.511369] env[62383]: DEBUG nova.compute.provider_tree [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1263.541285] env[62383]: DEBUG nova.compute.manager [req-f0a97dfd-8035-4052-b655-2aef2d057222 req-f429ed1a-09b5-4194-91df-e66df8530b3f service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Received event network-changed-7cbb97fd-c997-46f0-8174-980bf3cec122 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1263.541371] env[62383]: DEBUG nova.compute.manager [req-f0a97dfd-8035-4052-b655-2aef2d057222 req-f429ed1a-09b5-4194-91df-e66df8530b3f service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Refreshing instance network info cache due to event network-changed-7cbb97fd-c997-46f0-8174-980bf3cec122. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1263.541525] env[62383]: DEBUG oslo_concurrency.lockutils [req-f0a97dfd-8035-4052-b655-2aef2d057222 req-f429ed1a-09b5-4194-91df-e66df8530b3f service nova] Acquiring lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1263.541669] env[62383]: DEBUG oslo_concurrency.lockutils [req-f0a97dfd-8035-4052-b655-2aef2d057222 req-f429ed1a-09b5-4194-91df-e66df8530b3f service nova] Acquired lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1263.541977] env[62383]: DEBUG nova.network.neutron [req-f0a97dfd-8035-4052-b655-2aef2d057222 req-f429ed1a-09b5-4194-91df-e66df8530b3f service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Refreshing network info cache for port 7cbb97fd-c997-46f0-8174-980bf3cec122 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1263.563022] env[62383]: DEBUG oslo_vmware.api [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452620, 'name': PowerOffVM_Task, 'duration_secs': 0.226585} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.563247] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1263.563423] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1263.563684] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17bf63c5-77af-44eb-a3eb-b74e4f9e25dd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.605172] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d7dc789e-9dd6-41df-a6ae-d39975317c46 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.792s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.627956] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1263.628283] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 
8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1263.628439] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Deleting the datastore file [datastore2] 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1263.628706] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-683aa3c1-5fe6-4fda-a01e-f91b525da325 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.635398] env[62383]: DEBUG oslo_vmware.api [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for the task: (returnval){ [ 1263.635398] env[62383]: value = "task-2452622" [ 1263.635398] env[62383]: _type = "Task" [ 1263.635398] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.643009] env[62383]: DEBUG oslo_vmware.api [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452622, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.013944] env[62383]: DEBUG nova.scheduler.client.report [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1264.146718] env[62383]: DEBUG oslo_vmware.api [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Task: {'id': task-2452622, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170893} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.146977] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1264.147179] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1264.147356] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1264.147527] env[62383]: INFO nova.compute.manager [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1264.148200] env[62383]: DEBUG oslo.service.loopingcall [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1264.148200] env[62383]: DEBUG nova.compute.manager [-] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1264.148200] env[62383]: DEBUG nova.network.neutron [-] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1264.327276] env[62383]: DEBUG nova.network.neutron [req-f0a97dfd-8035-4052-b655-2aef2d057222 req-f429ed1a-09b5-4194-91df-e66df8530b3f service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updated VIF entry in instance network info cache for port 7cbb97fd-c997-46f0-8174-980bf3cec122. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1264.327624] env[62383]: DEBUG nova.network.neutron [req-f0a97dfd-8035-4052-b655-2aef2d057222 req-f429ed1a-09b5-4194-91df-e66df8530b3f service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updating instance_info_cache with network_info: [{"id": "7cbb97fd-c997-46f0-8174-980bf3cec122", "address": "fa:16:3e:47:0a:0f", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbb97fd-c9", "ovs_interfaceid": "7cbb97fd-c997-46f0-8174-980bf3cec122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.376599] env[62383]: DEBUG nova.compute.manager [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1264.404300] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1264.404692] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1264.404885] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1264.405112] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1264.405270] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1264.405425] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1264.405636] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1264.405795] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1264.405971] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b 
tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1264.406146] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1264.406320] env[62383]: DEBUG nova.virt.hardware [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1264.407181] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353d41b7-1bb4-440d-845a-88100985d796 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.415201] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009dc163-dc56-4610-a327-4e349a3e3c9e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.428817] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1264.434327] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Creating folder: Project (43167f39e1d44da7a5f420a0227cb26a). Parent ref: group-v496304. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1264.435041] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7fae05a4-a2c8-4eb5-b7ff-034db6221ae8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.446328] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Created folder: Project (43167f39e1d44da7a5f420a0227cb26a) in parent group-v496304. [ 1264.446870] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Creating folder: Instances. Parent ref: group-v496630. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1264.447154] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7da65019-5223-4b9f-8580-44667d944425 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.458586] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Created folder: Instances in parent group-v496630. 
[ 1264.458866] env[62383]: DEBUG oslo.service.loopingcall [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1264.459077] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1264.459290] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd57c73c-243a-45bf-95c5-031353d34e9b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.478024] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1264.478024] env[62383]: value = "task-2452625" [ 1264.478024] env[62383]: _type = "Task" [ 1264.478024] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.486152] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452625, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.493057] env[62383]: DEBUG nova.compute.manager [req-6b14f543-9065-4075-9f54-227bb59c5e5a req-da325ee5-0b57-405c-9990-57a43eeae3f1 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Received event network-vif-deleted-b4fa95e4-d3ed-4f7a-b546-c1db145d291a {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1264.494057] env[62383]: INFO nova.compute.manager [req-6b14f543-9065-4075-9f54-227bb59c5e5a req-da325ee5-0b57-405c-9990-57a43eeae3f1 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Neutron deleted interface b4fa95e4-d3ed-4f7a-b546-c1db145d291a; detaching it from the instance and deleting it from the info cache [ 1264.494317] env[62383]: DEBUG nova.network.neutron [req-6b14f543-9065-4075-9f54-227bb59c5e5a req-da325ee5-0b57-405c-9990-57a43eeae3f1 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.519053] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.167s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1264.519560] env[62383]: DEBUG nova.compute.manager [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Start building networks asynchronously for instance. 
{{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1264.830490] env[62383]: DEBUG oslo_concurrency.lockutils [req-f0a97dfd-8035-4052-b655-2aef2d057222 req-f429ed1a-09b5-4194-91df-e66df8530b3f service nova] Releasing lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1264.979412] env[62383]: DEBUG nova.network.neutron [-] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.989673] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452625, 'name': CreateVM_Task, 'duration_secs': 0.250079} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.990426] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1264.990854] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1264.991024] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.991418] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1264.991916] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16dc465c-c9e9-45dd-9533-f8a51d84fa45 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.997164] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1264.997164] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524df9b8-38fe-bc03-2913-fe5b2480e15c" [ 1264.997164] env[62383]: _type = "Task" [ 1264.997164] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1264.997432] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9909b9e-8fc8-469c-90d5-a384f4ff0b4e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.007443] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524df9b8-38fe-bc03-2913-fe5b2480e15c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.011247] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1066d215-b0d7-40fd-96d6-199e902857c9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.023941] env[62383]: DEBUG nova.compute.utils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1265.025254] env[62383]: DEBUG nova.compute.manager [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Not allocating networking since 'none' was specified. {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1265.038036] env[62383]: DEBUG nova.compute.manager [req-6b14f543-9065-4075-9f54-227bb59c5e5a req-da325ee5-0b57-405c-9990-57a43eeae3f1 service nova] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Detach interface failed, port_id=b4fa95e4-d3ed-4f7a-b546-c1db145d291a, reason: Instance 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1265.240239] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1265.485248] env[62383]: INFO nova.compute.manager [-] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Took 1.34 seconds to deallocate network for instance. [ 1265.513038] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524df9b8-38fe-bc03-2913-fe5b2480e15c, 'name': SearchDatastore_Task, 'duration_secs': 0.010434} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.513489] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1265.513825] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1265.514262] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1265.514436] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.514727] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1265.515155] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b47341a-1975-47ac-8557-7d5b8ec1237a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.526498] env[62383]: DEBUG nova.compute.manager [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1265.530817] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1265.531117] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1265.532606] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ed6134e-29d6-437f-a233-ec56753654a2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.539974] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1265.539974] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52ac94c8-4693-eb35-ca19-be58a188fc25" [ 1265.539974] env[62383]: _type = "Task" [ 1265.539974] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.551529] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ac94c8-4693-eb35-ca19-be58a188fc25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.742984] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.743367] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.743619] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.743845] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1265.744787] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfd8d43-3f10-4314-81f7-05e8677f27fd {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.753128] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68f7323-b3a3-47d2-97bb-f71459240501 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.766785] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e703b43d-e389-4cd4-b766-a7dc2031d167 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.772994] env[62383]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d084e90-a319-4a01-a6cb-bc1932eff958 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.803616] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180371MB free_disk=146GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1265.803616] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.803616] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.831061] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "02e2865c-ff68-44ac-abc6-839e399bbe7c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.831441] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "02e2865c-ff68-44ac-abc6-839e399bbe7c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.831486] env[62383]: DEBUG nova.compute.manager [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1265.832370] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d304c44e-c28a-43ff-a297-4006d8ab12d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.839344] env[62383]: DEBUG nova.compute.manager [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1265.839897] env[62383]: DEBUG nova.objects.instance [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'flavor' on Instance uuid 02e2865c-ff68-44ac-abc6-839e399bbe7c {{(pid=62383) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.992192] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.051339] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52ac94c8-4693-eb35-ca19-be58a188fc25, 'name': SearchDatastore_Task, 'duration_secs': 0.009316} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.052070] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df65398e-b336-4bfa-8349-64494c9c52f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.057519] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1266.057519] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a90648-7c32-3ef1-819f-c0ee7c522bd1" [ 1266.057519] env[62383]: _type = "Task" [ 1266.057519] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.064619] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a90648-7c32-3ef1-819f-c0ee7c522bd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.537414] env[62383]: DEBUG nova.compute.manager [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1266.566615] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1266.566862] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1266.567034] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1266.567244] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1266.567395] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1266.567566] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1266.567788] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1266.567964] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1266.568156] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 
tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1266.568337] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1266.568525] env[62383]: DEBUG nova.virt.hardware [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1266.569351] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096a14d0-6558-445c-8d03-bab913e4f650 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.575633] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a90648-7c32-3ef1-819f-c0ee7c522bd1, 'name': SearchDatastore_Task, 'duration_secs': 0.008964} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.576326] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1266.576618] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ebc0fa37-eb1a-421c-a421-cd990c6b84dc/ebc0fa37-eb1a-421c-a421-cd990c6b84dc.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1266.576891] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e40010fa-221f-4e8d-8371-c2691c537c0f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.582095] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a7f46c-d9f9-4969-8f97-155407e6847b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.586991] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1266.586991] env[62383]: value = "task-2452626" [ 1266.586991] env[62383]: _type = "Task" [ 1266.586991] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.598257] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Instance VIF info [] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1266.604162] env[62383]: DEBUG oslo.service.loopingcall [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1266.604678] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1266.604882] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b270a7d5-a6e7-4c30-82f0-8863c4d3388b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.619608] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452626, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.624395] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1266.624395] env[62383]: value = "task-2452627" [ 1266.624395] env[62383]: _type = "Task" [ 1266.624395] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.631638] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452627, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.834636] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 02e2865c-ff68-44ac-abc6-839e399bbe7c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1266.834942] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance b8e512cd-5eb9-423c-9447-833e34909bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1266.835073] env[62383]: WARNING nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1266.835154] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance e05e6a48-6992-41f2-a937-2f8e022cf1f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1266.835282] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance ebc0fa37-eb1a-421c-a421-cd990c6b84dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1266.835402] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance a743070f-cdda-4bf3-a15a-8199e9d57e3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1266.835596] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1266.835736] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1266.846031] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1266.846411] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46f349b0-b8a8-41b6-bf9c-78fc62124aee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.856957] env[62383]: DEBUG oslo_vmware.api [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1266.856957] env[62383]: value = "task-2452628" [ 1266.856957] env[62383]: _type = "Task" [ 1266.856957] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.865216] env[62383]: DEBUG oslo_vmware.api [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452628, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.935965] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353bbe43-4ca3-4f4f-afe8-e22c30f484ba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.944824] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8797c6-8658-413a-9d7c-f34c6fb7a592 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.976938] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6286f760-1ab0-43fa-968a-624dff29b943 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.985068] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98319fca-f05f-4368-a1d4-f0f703a2c60c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.999364] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1267.097345] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456418} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.097581] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] ebc0fa37-eb1a-421c-a421-cd990c6b84dc/ebc0fa37-eb1a-421c-a421-cd990c6b84dc.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1267.097758] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1267.098026] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f8a3b282-1c24-4ca2-b0e4-a265dd809fd2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.104635] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1267.104635] env[62383]: value = "task-2452629" [ 1267.104635] env[62383]: _type = "Task" [ 1267.104635] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.112541] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.133526] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452627, 'name': CreateVM_Task, 'duration_secs': 0.473485} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.133719] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1267.134181] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1267.134269] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.134585] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1267.134829] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6907ff87-4367-47b5-ae6b-6ad26f7f2b95 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.139584] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1267.139584] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]522dd368-05b1-ea67-6e9f-435a7cd40962" [ 1267.139584] env[62383]: _type = "Task" [ 1267.139584] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.147290] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522dd368-05b1-ea67-6e9f-435a7cd40962, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.367651] env[62383]: DEBUG oslo_vmware.api [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452628, 'name': PowerOffVM_Task, 'duration_secs': 0.318279} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.367899] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1267.368118] env[62383]: DEBUG nova.compute.manager [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1267.368859] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46af3880-b9dd-40b3-b0de-2629449b9ee3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.505022] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1267.615480] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.313689} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.615772] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1267.616556] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b626b01-528f-46a6-b599-cb7d91b60bf1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.635801] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] ebc0fa37-eb1a-421c-a421-cd990c6b84dc/ebc0fa37-eb1a-421c-a421-cd990c6b84dc.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1267.636068] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d7dc55e-0c5b-44b4-9106-2c4d719e79cb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.660249] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]522dd368-05b1-ea67-6e9f-435a7cd40962, 'name': SearchDatastore_Task, 'duration_secs': 0.029539} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.661403] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1267.661637] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1267.661861] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1267.662014] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.662200] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1267.662495] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1267.662495] env[62383]: value = "task-2452630" [ 1267.662495] env[62383]: _type = "Task" [ 1267.662495] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.662660] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-289da87b-392b-4020-9dde-32b264747180 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.673313] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452630, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.674268] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1267.674417] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1267.675169] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-572f795e-f4b4-4674-966e-f2563ad4e79e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.679656] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1267.679656] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]521cea74-a614-d4ba-0c1a-102eb6376e33" [ 1267.679656] env[62383]: _type = "Task" [ 1267.679656] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.686760] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521cea74-a614-d4ba-0c1a-102eb6376e33, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.879594] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb9e725d-5f05-4dc3-8840-93b0831255a8 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "02e2865c-ff68-44ac-abc6-839e399bbe7c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.008100] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1268.008422] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.205s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.008803] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.017s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.009091] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.034861] env[62383]: INFO nova.scheduler.client.report [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Deleted allocations for instance 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8 [ 1268.174109] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452630, 'name': ReconfigVM_Task, 'duration_secs': 0.417494} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.174109] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Reconfigured VM instance instance-0000007b to attach disk [datastore2] ebc0fa37-eb1a-421c-a421-cd990c6b84dc/ebc0fa37-eb1a-421c-a421-cd990c6b84dc.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1268.174447] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09b93702-977d-457f-8a97-20103c00230e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.180370] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1268.180370] env[62383]: value = "task-2452631" [ 1268.180370] env[62383]: _type = "Task" [ 1268.180370] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.191353] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]521cea74-a614-d4ba-0c1a-102eb6376e33, 'name': SearchDatastore_Task, 'duration_secs': 0.0097} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.194694] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452631, 'name': Rename_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.194892] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85a945ce-4c40-4047-8efa-e0d3afc7b580 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.200016] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1268.200016] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4c1e0-e6e2-1363-4776-c0991f4f1e60" [ 1268.200016] env[62383]: _type = "Task" [ 1268.200016] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.207307] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4c1e0-e6e2-1363-4776-c0991f4f1e60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.215553] env[62383]: DEBUG nova.objects.instance [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'flavor' on Instance uuid 02e2865c-ff68-44ac-abc6-839e399bbe7c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1268.542938] env[62383]: DEBUG oslo_concurrency.lockutils [None req-bb4ace45-7ccf-4ca1-9fd5-cf6a658a3c5b tempest-AttachInterfacesV270Test-725423304 tempest-AttachInterfacesV270Test-725423304-project-member] Lock "8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.014s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1268.692917] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452631, 'name': Rename_Task, 'duration_secs': 0.169808} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.693214] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1268.693457] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0b90322-e308-4623-a018-4b374da209f6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.698931] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1268.698931] env[62383]: value = "task-2452632" [ 1268.698931] env[62383]: _type = "Task" [ 1268.698931] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.708918] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452632, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.712148] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52f4c1e0-e6e2-1363-4776-c0991f4f1e60, 'name': SearchDatastore_Task, 'duration_secs': 0.012167} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.712441] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1268.712713] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a743070f-cdda-4bf3-a15a-8199e9d57e3c/a743070f-cdda-4bf3-a15a-8199e9d57e3c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1268.712948] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96564283-0674-4fa1-9567-017d4acb1051 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.722713] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1268.722713] env[62383]: value = "task-2452633" [ 1268.722713] env[62383]: _type = "Task" [ 1268.722713] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.727189] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1268.727455] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1268.727742] env[62383]: DEBUG nova.network.neutron [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1268.728052] env[62383]: DEBUG nova.objects.instance [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'info_cache' on Instance uuid 02e2865c-ff68-44ac-abc6-839e399bbe7c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1268.736184] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452633, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.213110] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452632, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.232515] env[62383]: DEBUG nova.objects.base [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Object Instance<02e2865c-ff68-44ac-abc6-839e399bbe7c> lazy-loaded attributes: flavor,info_cache {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1269.233946] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452633, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50871} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.233946] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a743070f-cdda-4bf3-a15a-8199e9d57e3c/a743070f-cdda-4bf3-a15a-8199e9d57e3c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1269.234270] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1269.234468] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-acaf1303-88c4-4f76-8033-01c7b853766c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.241747] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1269.241747] env[62383]: value = "task-2452634" [ 1269.241747] env[62383]: _type = "Task" [ 1269.241747] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.250686] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452634, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.710912] env[62383]: DEBUG oslo_vmware.api [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452632, 'name': PowerOnVM_Task, 'duration_secs': 0.730728} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.710912] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1269.710912] env[62383]: INFO nova.compute.manager [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Took 5.33 seconds to spawn the instance on the hypervisor. [ 1269.710912] env[62383]: DEBUG nova.compute.manager [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1269.711387] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09274f7a-af4d-4cde-8c02-4ad81ed884a1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.751104] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452634, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061996} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.751431] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1269.752371] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285a8067-d23c-4b61-9b97-f029ba304b12 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.773375] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] a743070f-cdda-4bf3-a15a-8199e9d57e3c/a743070f-cdda-4bf3-a15a-8199e9d57e3c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1269.774242] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0d3f245-c33f-4487-a31f-dc4b81bd42cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.799405] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1269.799405] env[62383]: value = "task-2452635" [ 1269.799405] env[62383]: _type = "Task" [ 1269.799405] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.806141] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452635, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.978595] env[62383]: DEBUG nova.network.neutron [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Updating instance_info_cache with network_info: [{"id": "a782529d-0901-4b64-93d5-d80e66052a01", "address": "fa:16:3e:7f:b0:78", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa782529d-09", "ovs_interfaceid": "a782529d-0901-4b64-93d5-d80e66052a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.011967] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.011967] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.011967] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1270.011967] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Rebuilding the list of instances to heal {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1270.227695] env[62383]: INFO nova.compute.manager [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Took 10.06 seconds to build instance. 
[ 1270.308995] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452635, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.480925] env[62383]: DEBUG oslo_concurrency.lockutils [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1270.515881] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Skipping network cache update for instance because it is Building. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10458}} [ 1270.541675] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1270.541828] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquired lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.541981] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Forcefully refreshing network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1270.542183] env[62383]: DEBUG nova.objects.instance [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lazy-loading 'info_cache' on Instance uuid b8e512cd-5eb9-423c-9447-833e34909bc3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1270.729433] env[62383]: DEBUG oslo_concurrency.lockutils [None req-23ae525a-bbc7-4cb3-afd7-24642650a37b tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "ebc0fa37-eb1a-421c-a421-cd990c6b84dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.565s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1270.808356] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452635, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.310532] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452635, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.486806] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1271.487158] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5aa498b-0aa8-49ad-9b1b-e0a96b10395b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.496250] env[62383]: DEBUG oslo_vmware.api [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1271.496250] env[62383]: value = "task-2452636" [ 1271.496250] env[62383]: _type = "Task" [ 1271.496250] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.504220] env[62383]: DEBUG oslo_vmware.api [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452636, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.809590] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452635, 'name': ReconfigVM_Task, 'duration_secs': 1.584512} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.810316] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Reconfigured VM instance instance-0000007c to attach disk [datastore2] a743070f-cdda-4bf3-a15a-8199e9d57e3c/a743070f-cdda-4bf3-a15a-8199e9d57e3c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1271.811434] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69c32635-5b6f-4708-bb41-d4348e500f1f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.822016] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1271.822016] env[62383]: value = "task-2452637" [ 1271.822016] env[62383]: _type = "Task" [ 1271.822016] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.830548] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452637, 'name': Rename_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.006379] env[62383]: DEBUG oslo_vmware.api [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452636, 'name': PowerOnVM_Task, 'duration_secs': 0.451826} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.006893] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1272.007014] env[62383]: DEBUG nova.compute.manager [None req-1cde87cb-646e-4022-990b-0ceacbc7fa9f tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1272.007741] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cfe48e-53ef-4a44-85bc-810a49bfa3c4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.330389] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452637, 'name': Rename_Task, 'duration_secs': 0.150659} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.330739] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1272.330883] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-188004a3-3c11-4c7f-9937-6b8fc1bdda64 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.336721] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1272.336721] env[62383]: value = "task-2452638" [ 1272.336721] env[62383]: _type = "Task" [ 1272.336721] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.343799] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452638, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.397775] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating instance_info_cache with network_info: [{"id": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "address": "fa:16:3e:b4:77:63", "network": {"id": "fb579c5b-8022-44ad-a4ed-090781beab2e", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-472640399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b395bdf2df794b32a117f93fa4887c8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d1962c-c0", "ovs_interfaceid": "f1d1962c-c0f2-4e5b-9885-b71019f7e792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.849817] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452638, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.901441] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Releasing lock "refresh_cache-b8e512cd-5eb9-423c-9447-833e34909bc3" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.904309] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updated the network info_cache for instance {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1272.904309] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.904309] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.904309] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.904309] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.904309] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.904309] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.904309] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1273.058834] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5312ff7c-eb68-42f1-a13f-2593f812fe7e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.069926] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1c5a2548-073d-4701-85ae-83f7ca37ad87 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Suspending the VM {{(pid=62383) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1273.070226] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-162f99da-2ae0-4180-8522-6a5f38ded993 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.077588] env[62383]: DEBUG oslo_vmware.api [None req-1c5a2548-073d-4701-85ae-83f7ca37ad87 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1273.077588] env[62383]: value = "task-2452639" [ 1273.077588] env[62383]: _type = "Task" [ 1273.077588] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.087291] env[62383]: DEBUG oslo_vmware.api [None req-1c5a2548-073d-4701-85ae-83f7ca37ad87 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452639, 'name': SuspendVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.349524] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452638, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.502405] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquiring lock "e2188b8a-4982-49ea-945c-3e0a3437ae81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.502655] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Lock "e2188b8a-4982-49ea-945c-3e0a3437ae81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1273.589940] env[62383]: DEBUG oslo_vmware.api [None req-1c5a2548-073d-4701-85ae-83f7ca37ad87 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452639, 'name': SuspendVM_Task} progress is 70%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.849028] env[62383]: DEBUG oslo_vmware.api [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452638, 'name': PowerOnVM_Task, 'duration_secs': 1.221948} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.849398] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1273.849613] env[62383]: INFO nova.compute.manager [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Took 7.31 seconds to spawn the instance on the hypervisor. [ 1273.849793] env[62383]: DEBUG nova.compute.manager [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1273.850565] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228b7eca-9a90-49a8-841d-30bbd7c3c39c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.004758] env[62383]: DEBUG nova.compute.manager [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Starting instance... {{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1274.087876] env[62383]: DEBUG oslo_vmware.api [None req-1c5a2548-073d-4701-85ae-83f7ca37ad87 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452639, 'name': SuspendVM_Task, 'duration_secs': 0.587837} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.088189] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-1c5a2548-073d-4701-85ae-83f7ca37ad87 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Suspended the VM {{(pid=62383) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1274.088373] env[62383]: DEBUG nova.compute.manager [None req-1c5a2548-073d-4701-85ae-83f7ca37ad87 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1274.089163] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d88c67-8757-4d93-9c4d-f5f48814839d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.366766] env[62383]: INFO nova.compute.manager [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Took 13.91 seconds to build instance. [ 1274.527209] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.527597] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.529082] env[62383]: INFO nova.compute.claims [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1274.634322] env[62383]: INFO nova.compute.manager [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Rebuilding instance [ 1274.673818] env[62383]: DEBUG nova.compute.manager [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1274.674692] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d2ec58-9db4-48db-92f5-06c60abcba4f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.868916] env[62383]: DEBUG oslo_concurrency.lockutils [None req-d77d6d2c-f7e8-4fe5-b810-bd07ea72f138 tempest-ServerShowV247Test-1346384455 
tempest-ServerShowV247Test-1346384455-project-member] Lock "a743070f-cdda-4bf3-a15a-8199e9d57e3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.421s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1275.470342] env[62383]: INFO nova.compute.manager [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Resuming [ 1275.471012] env[62383]: DEBUG nova.objects.instance [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'flavor' on Instance uuid 02e2865c-ff68-44ac-abc6-839e399bbe7c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1275.618730] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f0c38c-731f-4532-965f-36e3e2fbdc5d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.626533] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95e3140-6647-4e34-8508-cc7339d39a3b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.657063] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4149f4b-1dbe-4d7b-8a73-3880a7548359 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.664235] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f986066-af3e-4763-b0b8-e5868e06490e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.676966] env[62383]: DEBUG nova.compute.provider_tree [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1275.687333] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1275.687573] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9391ea0e-6df9-4bab-a790-85ab6e26b6ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.694625] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 
tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1275.694625] env[62383]: value = "task-2452640" [ 1275.694625] env[62383]: _type = "Task" [ 1275.694625] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.702625] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.200030] env[62383]: ERROR nova.scheduler.client.report [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [req-040b45cc-e4e5-461d-91ac-6555968cc01e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 145, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 60615f54-0557-436e-a486-87505bffb4c7. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-040b45cc-e4e5-461d-91ac-6555968cc01e"}]} [ 1276.208164] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452640, 'name': PowerOffVM_Task, 'duration_secs': 0.110189} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.208476] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1276.208726] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1276.209644] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b09e590-86de-4b9b-bd2d-584bb2e18600 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.216603] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1276.218098] env[62383]: DEBUG nova.scheduler.client.report [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Refreshing inventories for resource provider 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1276.219238] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca270898-23a3-4a32-8a87-fffeffe54fe7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.232548] env[62383]: DEBUG nova.scheduler.client.report [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Updating ProviderTree inventory for provider 60615f54-0557-436e-a486-87505bffb4c7 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1276.232772] env[62383]: DEBUG nova.compute.provider_tree [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Updating inventory in ProviderTree for provider 60615f54-0557-436e-a486-87505bffb4c7 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1276.243231] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1276.243496] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1276.243735] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Deleting the datastore file [datastore2] a743070f-cdda-4bf3-a15a-8199e9d57e3c {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1276.244646] env[62383]: DEBUG nova.scheduler.client.report [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Refreshing aggregate associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, aggregates: None {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1276.246579] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-199c6308-1717-4a1e-9e1c-1d5232a37b0e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.253623] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1276.253623] env[62383]: value = "task-2452642" [ 1276.253623] env[62383]: _type = "Task" [ 1276.253623] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.261715] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452642, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.266604] env[62383]: DEBUG nova.scheduler.client.report [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Refreshing trait associations for resource provider 60615f54-0557-436e-a486-87505bffb4c7, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,HW_ARCH_X86_64,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62383) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1276.349299] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e2e924-1822-4d8d-8f27-08093864c1b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.357068] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d81c3a1-6c5b-49ce-883b-92360748ecf2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.387391] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccd8cc6-9fed-4e7a-a849-c2745f421722 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.394475] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29ed2ae-f99d-4937-ab8a-dccab8971e77 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.407268] env[62383]: DEBUG nova.compute.provider_tree [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.763389] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452642, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089976} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.763767] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1276.763808] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1276.764066] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1276.912976] env[62383]: DEBUG nova.scheduler.client.report [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1276.981236] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.981467] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquired lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.981623] env[62383]: DEBUG nova.network.neutron [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1277.415415] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.888s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.415969] env[62383]: DEBUG nova.compute.manager [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 
tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1277.693892] env[62383]: DEBUG nova.network.neutron [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Updating instance_info_cache with network_info: [{"id": "a782529d-0901-4b64-93d5-d80e66052a01", "address": "fa:16:3e:7f:b0:78", "network": {"id": "da412528-d69b-4d87-ac21-eaa5f2ac83c7", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-129708345-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e320302a6b1e466e887c787006413dec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa782529d-09", "ovs_interfaceid": "a782529d-0901-4b64-93d5-d80e66052a01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.798627] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1277.798904] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1277.799117] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1277.799371] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1277.799561] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1277.799744] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1277.799989] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1277.800405] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1277.800405] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1277.800557] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1277.800781] env[62383]: DEBUG nova.virt.hardware [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1277.801709] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249c4378-98b9-4bed-b1f4-561501e30293 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.809471] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b243bb7-c809-45e7-b6f5-91cc786ca473 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.823267] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Instance VIF info [] 
{{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1277.828775] env[62383]: DEBUG oslo.service.loopingcall [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1277.828993] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1277.829200] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b45b89cb-9119-469f-9d8b-86f5b5e8896f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.845812] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1277.845812] env[62383]: value = "task-2452643" [ 1277.845812] env[62383]: _type = "Task" [ 1277.845812] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.852864] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452643, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.920936] env[62383]: DEBUG nova.compute.utils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1277.922424] env[62383]: DEBUG nova.compute.manager [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Allocating IP information in the background. 
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1277.922598] env[62383]: DEBUG nova.network.neutron [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1277.963783] env[62383]: DEBUG nova.policy [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e164391e4594492863a7ab5e66cb2ba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1da16200e3d44e3e8f44c4efad18b6cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1278.196285] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Releasing lock "refresh_cache-02e2865c-ff68-44ac-abc6-839e399bbe7c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.197764] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9d6d85-56d2-4547-b31c-a857a7280896 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.204197] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Resuming the VM {{(pid=62383) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1278.204458] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d418041f-4bb3-4623-a91f-9729ddd8abe6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.208794] env[62383]: DEBUG nova.network.neutron [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Successfully created port: 87022a62-e164-4fdc-8bdf-a41eb2391625 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1278.212061] env[62383]: DEBUG oslo_vmware.api [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1278.212061] env[62383]: value = "task-2452644" [ 1278.212061] env[62383]: _type = "Task" [ 1278.212061] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.219854] env[62383]: DEBUG oslo_vmware.api [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452644, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.356185] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452643, 'name': CreateVM_Task, 'duration_secs': 0.323431} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.356350] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1278.357012] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.357012] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.357155] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1278.357381] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98b1a4e4-f752-49b4-9e0e-ce2c496ebad1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.361853] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1278.361853] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]527e8d77-28af-8dcf-7cef-fa7de3456065" [ 1278.361853] env[62383]: _type = "Task" [ 1278.361853] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.370036] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527e8d77-28af-8dcf-7cef-fa7de3456065, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.425927] env[62383]: DEBUG nova.compute.manager [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1278.723113] env[62383]: DEBUG oslo_vmware.api [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452644, 'name': PowerOnVM_Task, 'duration_secs': 0.443507} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.723417] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Resumed the VM {{(pid=62383) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1278.723605] env[62383]: DEBUG nova.compute.manager [None req-f52d381c-faba-45f5-939d-6cbf9b2e9864 tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1278.724418] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf28b0a4-98a1-4618-9f90-389f7d801f35 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.872513] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]527e8d77-28af-8dcf-7cef-fa7de3456065, 'name': SearchDatastore_Task, 'duration_secs': 0.010267} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.873097] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.873097] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1278.873342] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.873499] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.873683] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1278.873955] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1788ac52-47e3-4556-944a-9217b5bab05c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.897543] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1278.897803] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1278.898593] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43384715-c1f7-49ca-a7b0-09f58f9fec07 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.905317] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1278.905317] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b99ae4-5b6f-e4a8-333d-6e2f04588d82" [ 1278.905317] env[62383]: _type = "Task" [ 1278.905317] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.913292] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b99ae4-5b6f-e4a8-333d-6e2f04588d82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.416429] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b99ae4-5b6f-e4a8-333d-6e2f04588d82, 'name': SearchDatastore_Task, 'duration_secs': 0.030912} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.417226] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7028cb72-d341-490b-b569-8a0c3b739c64 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.422333] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1279.422333] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52e30b04-c7fd-c638-50fe-228cbf61c6fd" [ 1279.422333] env[62383]: _type = "Task" [ 1279.422333] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.429584] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e30b04-c7fd-c638-50fe-228cbf61c6fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.435735] env[62383]: DEBUG nova.compute.manager [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1279.463139] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1279.463548] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1279.464076] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1279.464154] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1279.464406] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1279.465061] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1279.465061] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1279.465061] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1279.465552] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1279.465621] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1279.465782] env[62383]: DEBUG nova.virt.hardware [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1279.467026] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db87c84e-f710-423f-ac2c-8ebdb9001011 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.474651] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7b8ad5-3adc-4d89-b35a-43b402846040 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.593434] env[62383]: DEBUG nova.compute.manager [req-68c561e7-8ff4-400c-932b-80fa62940ff3 req-4dc58fb4-c3e4-474b-875b-d3809d569477 service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Received event network-vif-plugged-87022a62-e164-4fdc-8bdf-a41eb2391625 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1279.593654] env[62383]: DEBUG oslo_concurrency.lockutils [req-68c561e7-8ff4-400c-932b-80fa62940ff3 req-4dc58fb4-c3e4-474b-875b-d3809d569477 service nova] Acquiring lock "e2188b8a-4982-49ea-945c-3e0a3437ae81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.593862] env[62383]: DEBUG oslo_concurrency.lockutils [req-68c561e7-8ff4-400c-932b-80fa62940ff3 req-4dc58fb4-c3e4-474b-875b-d3809d569477 service nova] Lock "e2188b8a-4982-49ea-945c-3e0a3437ae81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.594044] env[62383]: DEBUG oslo_concurrency.lockutils [req-68c561e7-8ff4-400c-932b-80fa62940ff3 req-4dc58fb4-c3e4-474b-875b-d3809d569477 service nova] Lock "e2188b8a-4982-49ea-945c-3e0a3437ae81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.594213] env[62383]: DEBUG nova.compute.manager [req-68c561e7-8ff4-400c-932b-80fa62940ff3 req-4dc58fb4-c3e4-474b-875b-d3809d569477 service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] No waiting events found dispatching network-vif-plugged-87022a62-e164-4fdc-8bdf-a41eb2391625 {{(pid=62383) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1279.594378] env[62383]: WARNING nova.compute.manager [req-68c561e7-8ff4-400c-932b-80fa62940ff3 req-4dc58fb4-c3e4-474b-875b-d3809d569477 service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Received unexpected event network-vif-plugged-87022a62-e164-4fdc-8bdf-a41eb2391625 for instance with vm_state building and task_state spawning. [ 1279.687488] env[62383]: DEBUG nova.network.neutron [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Successfully updated port: 87022a62-e164-4fdc-8bdf-a41eb2391625 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.932809] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52e30b04-c7fd-c638-50fe-228cbf61c6fd, 'name': SearchDatastore_Task, 'duration_secs': 0.010228} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.933182] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1279.933310] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a743070f-cdda-4bf3-a15a-8199e9d57e3c/a743070f-cdda-4bf3-a15a-8199e9d57e3c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1279.933568] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0fb8a504-153a-4f32-a483-073d21c0f05d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.940288] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1279.940288] env[62383]: value = "task-2452645" [ 1279.940288] env[62383]: _type = "Task" [ 1279.940288] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.947564] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452645, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.138827] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "02e2865c-ff68-44ac-abc6-839e399bbe7c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.139133] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "02e2865c-ff68-44ac-abc6-839e399bbe7c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.139355] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Acquiring lock "02e2865c-ff68-44ac-abc6-839e399bbe7c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.139543] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "02e2865c-ff68-44ac-abc6-839e399bbe7c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.139715] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "02e2865c-ff68-44ac-abc6-839e399bbe7c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.142141] env[62383]: INFO nova.compute.manager [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Terminating instance [ 1280.190570] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquiring lock "refresh_cache-e2188b8a-4982-49ea-945c-3e0a3437ae81" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.190718] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquired lock "refresh_cache-e2188b8a-4982-49ea-945c-3e0a3437ae81" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.190869] env[62383]: DEBUG nova.network.neutron [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 
tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1280.450852] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452645, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.647060] env[62383]: DEBUG nova.compute.manager [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1280.647060] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1280.647434] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca00dd99-039f-4962-8d3c-f4218057bfa4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.654763] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1280.654987] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fdd6796-a57f-458c-9e5d-013e885f212a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.661264] env[62383]: DEBUG oslo_vmware.api [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1280.661264] env[62383]: value = "task-2452646" [ 1280.661264] env[62383]: _type = "Task" [ 1280.661264] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.668762] env[62383]: DEBUG oslo_vmware.api [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452646, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.723236] env[62383]: DEBUG nova.network.neutron [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Instance cache missing network info. 
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1280.852651] env[62383]: DEBUG nova.network.neutron [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Updating instance_info_cache with network_info: [{"id": "87022a62-e164-4fdc-8bdf-a41eb2391625", "address": "fa:16:3e:d5:7b:5d", "network": {"id": "b702c6aa-24d8-44d4-bc8c-f98f6ba732da", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1450393807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1da16200e3d44e3e8f44c4efad18b6cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87022a62-e1", "ovs_interfaceid": "87022a62-e164-4fdc-8bdf-a41eb2391625", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.951970] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452645, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573122} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.952359] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] a743070f-cdda-4bf3-a15a-8199e9d57e3c/a743070f-cdda-4bf3-a15a-8199e9d57e3c.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1280.952444] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1280.952677] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f91a2a4-426c-4d1e-917b-7c6eb50443c1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.959198] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1280.959198] env[62383]: value = "task-2452647" [ 1280.959198] env[62383]: _type = "Task" [ 1280.959198] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.966269] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452647, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.171340] env[62383]: DEBUG oslo_vmware.api [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452646, 'name': PowerOffVM_Task, 'duration_secs': 0.171917} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.171594] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1281.171764] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1281.172017] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01e2c216-41b3-499d-8260-6b0e8871c762 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.235279] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1281.235665] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1281.235877] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleting the datastore file [datastore2] 02e2865c-ff68-44ac-abc6-839e399bbe7c {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1281.236160] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f672ca95-681b-4e3c-9b29-b9566fcaeca9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.243069] env[62383]: DEBUG oslo_vmware.api [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for the task: (returnval){ [ 1281.243069] env[62383]: value = "task-2452649" [ 1281.243069] env[62383]: _type = "Task" [ 1281.243069] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.250199] env[62383]: DEBUG oslo_vmware.api [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452649, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.355067] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Releasing lock "refresh_cache-e2188b8a-4982-49ea-945c-3e0a3437ae81" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.355471] env[62383]: DEBUG nova.compute.manager [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Instance network_info: |[{"id": "87022a62-e164-4fdc-8bdf-a41eb2391625", "address": "fa:16:3e:d5:7b:5d", "network": {"id": "b702c6aa-24d8-44d4-bc8c-f98f6ba732da", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1450393807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1da16200e3d44e3e8f44c4efad18b6cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87022a62-e1", "ovs_interfaceid": "87022a62-e164-4fdc-8bdf-a41eb2391625", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1281.355916] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:7b:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '457c42cd-4ddb-4374-923e-d419b7f6eaff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87022a62-e164-4fdc-8bdf-a41eb2391625', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1281.363461] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Creating folder: Project (1da16200e3d44e3e8f44c4efad18b6cf). Parent ref: group-v496304. 
{{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1281.363742] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b15a973-03bb-46ee-bf92-3851b468eafc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.374828] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Created folder: Project (1da16200e3d44e3e8f44c4efad18b6cf) in parent group-v496304. [ 1281.375013] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Creating folder: Instances. Parent ref: group-v496635. {{(pid=62383) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1281.375289] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-efeec74c-6c7e-4791-9f36-e55b7bb5850f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.389013] env[62383]: INFO nova.virt.vmwareapi.vm_util [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Created folder: Instances in parent group-v496635. [ 1281.389257] env[62383]: DEBUG oslo.service.loopingcall [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1281.389442] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1281.389633] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94ea4669-7a60-49f9-8f80-7cc79c687d0c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.407180] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1281.407180] env[62383]: value = "task-2452652" [ 1281.407180] env[62383]: _type = "Task" [ 1281.407180] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.414124] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452652, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.467677] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452647, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063174} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.467903] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1281.468617] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b623f39-8705-4789-b4d3-b911340644b6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.487643] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] a743070f-cdda-4bf3-a15a-8199e9d57e3c/a743070f-cdda-4bf3-a15a-8199e9d57e3c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1281.487886] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdcf0b90-c081-4888-ba0b-ab1f341c3380 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.506569] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1281.506569] env[62383]: value = "task-2452653" [ 1281.506569] env[62383]: _type = "Task" [ 1281.506569] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.513583] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452653, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.641551] env[62383]: DEBUG nova.compute.manager [req-8d36a983-c8c9-432a-853f-a71d80635935 req-c9afc7c5-e846-44dd-a3aa-03720ca488db service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Received event network-changed-87022a62-e164-4fdc-8bdf-a41eb2391625 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1281.641794] env[62383]: DEBUG nova.compute.manager [req-8d36a983-c8c9-432a-853f-a71d80635935 req-c9afc7c5-e846-44dd-a3aa-03720ca488db service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Refreshing instance network info cache due to event network-changed-87022a62-e164-4fdc-8bdf-a41eb2391625. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1281.641986] env[62383]: DEBUG oslo_concurrency.lockutils [req-8d36a983-c8c9-432a-853f-a71d80635935 req-c9afc7c5-e846-44dd-a3aa-03720ca488db service nova] Acquiring lock "refresh_cache-e2188b8a-4982-49ea-945c-3e0a3437ae81" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.642145] env[62383]: DEBUG oslo_concurrency.lockutils [req-8d36a983-c8c9-432a-853f-a71d80635935 req-c9afc7c5-e846-44dd-a3aa-03720ca488db service nova] Acquired lock "refresh_cache-e2188b8a-4982-49ea-945c-3e0a3437ae81" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.642309] env[62383]: DEBUG nova.network.neutron [req-8d36a983-c8c9-432a-853f-a71d80635935 req-c9afc7c5-e846-44dd-a3aa-03720ca488db service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Refreshing network info cache for port 87022a62-e164-4fdc-8bdf-a41eb2391625 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1281.753017] env[62383]: DEBUG oslo_vmware.api [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Task: {'id': task-2452649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139812} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.753343] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1281.753568] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1281.753777] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1281.753980] env[62383]: INFO nova.compute.manager [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1281.754273] env[62383]: DEBUG oslo.service.loopingcall [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1281.754494] env[62383]: DEBUG nova.compute.manager [-] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1281.754609] env[62383]: DEBUG nova.network.neutron [-] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1281.917470] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452652, 'name': CreateVM_Task} progress is 25%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.015725] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452653, 'name': ReconfigVM_Task, 'duration_secs': 0.31225} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.016143] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Reconfigured VM instance instance-0000007c to attach disk [datastore2] a743070f-cdda-4bf3-a15a-8199e9d57e3c/a743070f-cdda-4bf3-a15a-8199e9d57e3c.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1282.016663] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b4053003-ba85-4d03-a340-ce197e6c6d32 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.022770] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1282.022770] env[62383]: value = "task-2452654" [ 1282.022770] env[62383]: _type = "Task" [ 1282.022770] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.031868] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452654, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.347771] env[62383]: DEBUG nova.network.neutron [req-8d36a983-c8c9-432a-853f-a71d80635935 req-c9afc7c5-e846-44dd-a3aa-03720ca488db service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Updated VIF entry in instance network info cache for port 87022a62-e164-4fdc-8bdf-a41eb2391625. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1282.348132] env[62383]: DEBUG nova.network.neutron [req-8d36a983-c8c9-432a-853f-a71d80635935 req-c9afc7c5-e846-44dd-a3aa-03720ca488db service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Updating instance_info_cache with network_info: [{"id": "87022a62-e164-4fdc-8bdf-a41eb2391625", "address": "fa:16:3e:d5:7b:5d", "network": {"id": "b702c6aa-24d8-44d4-bc8c-f98f6ba732da", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1450393807-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1da16200e3d44e3e8f44c4efad18b6cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87022a62-e1", "ovs_interfaceid": "87022a62-e164-4fdc-8bdf-a41eb2391625", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.417751] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452652, 'name': CreateVM_Task, 'duration_secs': 0.656073} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.417937] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1282.418621] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.418782] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.419120] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1282.419366] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0231950-2053-4e85-923f-bfd6c49c4d57 {{(pid=62383) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.423539] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for the task: (returnval){ [ 1282.423539] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52b14ec7-9f1c-3f91-d620-e65a19d7c7d2" [ 1282.423539] env[62383]: _type = "Task" [ 1282.423539] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.430521] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b14ec7-9f1c-3f91-d620-e65a19d7c7d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.531439] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452654, 'name': Rename_Task, 'duration_secs': 0.138536} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.531695] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1282.531919] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e37f8caa-be29-42eb-9e2a-3dee8676a7be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.538479] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1282.538479] env[62383]: value = "task-2452655" [ 1282.538479] env[62383]: _type = "Task" [ 1282.538479] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.548488] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452655, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.704380] env[62383]: DEBUG nova.network.neutron [-] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.850805] env[62383]: DEBUG oslo_concurrency.lockutils [req-8d36a983-c8c9-432a-853f-a71d80635935 req-c9afc7c5-e846-44dd-a3aa-03720ca488db service nova] Releasing lock "refresh_cache-e2188b8a-4982-49ea-945c-3e0a3437ae81" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.934819] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52b14ec7-9f1c-3f91-d620-e65a19d7c7d2, 'name': SearchDatastore_Task, 'duration_secs': 0.008925} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.935139] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.935419] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1282.935741] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.935840] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.936066] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1282.936325] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d2cd4d5-0b20-4d14-ae1d-995c986385a5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.944643] env[62383]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1282.944832] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1282.945587] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fee5fd54-d8a7-4113-98bb-bb3f552dccb6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.951376] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for the task: (returnval){ [ 1282.951376] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524c06b9-7da8-ea2c-8828-430cd66363e9" [ 1282.951376] env[62383]: _type = "Task" [ 1282.951376] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.958857] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524c06b9-7da8-ea2c-8828-430cd66363e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.048603] env[62383]: DEBUG oslo_vmware.api [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452655, 'name': PowerOnVM_Task, 'duration_secs': 0.419866} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.048884] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1283.049110] env[62383]: DEBUG nova.compute.manager [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1283.049971] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414883cb-03e5-4213-8156-66be34522f26 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.207539] env[62383]: INFO nova.compute.manager [-] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Took 1.45 seconds to deallocate network for instance. 
[ 1283.462195] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524c06b9-7da8-ea2c-8828-430cd66363e9, 'name': SearchDatastore_Task, 'duration_secs': 0.00823} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.462978] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df5cdf29-ced8-4fa5-a4e2-0f659cc1f8fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.467828] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for the task: (returnval){ [ 1283.467828] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]524b7a6e-2360-2df4-4b0e-6dcac8719e9c" [ 1283.467828] env[62383]: _type = "Task" [ 1283.467828] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.475327] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524b7a6e-2360-2df4-4b0e-6dcac8719e9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.567229] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.567431] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.567639] env[62383]: DEBUG nova.objects.instance [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62383) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1283.668099] env[62383]: DEBUG nova.compute.manager [req-eb0c53c2-8540-46b0-a4ee-3acba70257d6 req-d982223d-488e-4495-9699-09188a8a9eae service nova] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Received event network-vif-deleted-a782529d-0901-4b64-93d5-d80e66052a01 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1283.713799] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.978276] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]524b7a6e-2360-2df4-4b0e-6dcac8719e9c, 'name': SearchDatastore_Task, 'duration_secs': 0.009958} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.978648] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1283.978948] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] e2188b8a-4982-49ea-945c-3e0a3437ae81/e2188b8a-4982-49ea-945c-3e0a3437ae81.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1283.979271] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-502412d8-1442-4617-9626-f7ef1da115de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.988215] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for the task: (returnval){ [ 1283.988215] env[62383]: value = "task-2452656" [ 1283.988215] env[62383]: _type = "Task" [ 1283.988215] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.996914] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452656, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.490198] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "a743070f-cdda-4bf3-a15a-8199e9d57e3c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.490581] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "a743070f-cdda-4bf3-a15a-8199e9d57e3c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.490721] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "a743070f-cdda-4bf3-a15a-8199e9d57e3c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.490905] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "a743070f-cdda-4bf3-a15a-8199e9d57e3c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.491088] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "a743070f-cdda-4bf3-a15a-8199e9d57e3c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.496520] env[62383]: INFO nova.compute.manager [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Terminating instance [ 1284.503463] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452656, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455011} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.503663] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] e2188b8a-4982-49ea-945c-3e0a3437ae81/e2188b8a-4982-49ea-945c-3e0a3437ae81.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1284.503870] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1284.504119] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b146883-0105-410c-8780-8329ab9e46a7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.511706] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for the task: (returnval){ [ 1284.511706] env[62383]: value = "task-2452657" [ 1284.511706] env[62383]: _type = "Task" [ 1284.511706] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.518790] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452657, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.576454] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99132fea-32ec-4fe2-80d5-365992b72c21 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1284.578343] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.865s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.578622] env[62383]: DEBUG nova.objects.instance [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lazy-loading 'resources' on Instance uuid 02e2865c-ff68-44ac-abc6-839e399bbe7c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1285.000622] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "refresh_cache-a743070f-cdda-4bf3-a15a-8199e9d57e3c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.000821] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired lock "refresh_cache-a743070f-cdda-4bf3-a15a-8199e9d57e3c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.001015] env[62383]: DEBUG nova.network.neutron [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1285.021957] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452657, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054153} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.022210] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1285.022941] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26471d7a-7114-4e18-b515-b49233f0334d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.043883] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] e2188b8a-4982-49ea-945c-3e0a3437ae81/e2188b8a-4982-49ea-945c-3e0a3437ae81.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1285.044327] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06b3cc12-f9b2-442e-943a-e0f94e493469 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.062991] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for the task: (returnval){ [ 1285.062991] env[62383]: value = "task-2452658" [ 1285.062991] env[62383]: _type = "Task" [ 1285.062991] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.071909] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452658, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.169694] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070cf8ae-b8eb-4b85-93b4-e05b68edf539 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.177161] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1506bc6-5b10-4e45-a151-673e866772f7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.207455] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba1ea32-153b-4509-847b-f606a6642ec9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.215210] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a341a38e-e8fa-47d3-8b41-31e413b0d65d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.228613] env[62383]: DEBUG nova.compute.provider_tree [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1285.520416] env[62383]: DEBUG nova.network.neutron [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1285.568676] env[62383]: DEBUG nova.network.neutron [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.575651] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452658, 'name': ReconfigVM_Task, 'duration_secs': 0.274542} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.575944] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Reconfigured VM instance instance-0000007d to attach disk [datastore2] e2188b8a-4982-49ea-945c-3e0a3437ae81/e2188b8a-4982-49ea-945c-3e0a3437ae81.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1285.576612] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9810b58-8f45-45d1-b4b9-26dfa4256381 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.583644] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for the task: (returnval){ [ 1285.583644] env[62383]: value = "task-2452659" [ 1285.583644] env[62383]: _type = "Task" [ 1285.583644] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.591178] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452659, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.732280] env[62383]: DEBUG nova.scheduler.client.report [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1286.071511] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Releasing lock "refresh_cache-a743070f-cdda-4bf3-a15a-8199e9d57e3c" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1286.071987] env[62383]: DEBUG nova.compute.manager [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1286.072203] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1286.073092] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f7c4dc-a99a-434d-98a5-f747721f45be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.081247] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1286.081475] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5e877a3-7556-40ed-b299-5fbb2bf103df {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.087736] env[62383]: DEBUG oslo_vmware.api [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1286.087736] env[62383]: value = "task-2452660" [ 1286.087736] env[62383]: _type = "Task" [ 1286.087736] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.095075] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452659, 'name': Rename_Task, 'duration_secs': 0.135665} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.095657] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1286.095858] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ee9cec6-b18a-4531-ad84-776214236264 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.100010] env[62383]: DEBUG oslo_vmware.api [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452660, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.101532] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for the task: (returnval){ [ 1286.101532] env[62383]: value = "task-2452661" [ 1286.101532] env[62383]: _type = "Task" [ 1286.101532] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.108629] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452661, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.237923] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1286.256481] env[62383]: INFO nova.scheduler.client.report [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Deleted allocations for instance 02e2865c-ff68-44ac-abc6-839e399bbe7c [ 1286.597115] env[62383]: DEBUG oslo_vmware.api [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452660, 'name': PowerOffVM_Task, 'duration_secs': 0.184759} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.597406] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1286.597550] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1286.597793] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff59e4bd-ffaa-433a-acf8-4c58de81f87c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.609510] env[62383]: DEBUG oslo_vmware.api [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452661, 'name': PowerOnVM_Task, 'duration_secs': 0.437439} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.609747] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1286.609953] env[62383]: INFO nova.compute.manager [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Took 7.17 seconds to spawn the instance on the hypervisor. [ 1286.610146] env[62383]: DEBUG nova.compute.manager [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1286.610851] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7572090d-c3b3-4dea-9e9e-22c2d357f566 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.621036] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1286.621175] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1286.621271] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Deleting the datastore file [datastore2] a743070f-cdda-4bf3-a15a-8199e9d57e3c {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1286.621486] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fc2a564-c1a6-463d-8cdb-ef6afed155e1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.626528] env[62383]: DEBUG oslo_vmware.api [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1286.626528] env[62383]: value = "task-2452663" [ 1286.626528] env[62383]: _type = "Task" [ 1286.626528] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.633760] env[62383]: DEBUG oslo_vmware.api [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452663, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.764906] env[62383]: DEBUG oslo_concurrency.lockutils [None req-c64b11a5-7225-4f65-8161-81b38a890bbd tempest-ServerActionsTestJSON-382215 tempest-ServerActionsTestJSON-382215-project-member] Lock "02e2865c-ff68-44ac-abc6-839e399bbe7c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.626s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.127024] env[62383]: INFO nova.compute.manager [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Took 12.62 seconds to build instance. [ 1287.135632] env[62383]: DEBUG oslo_vmware.api [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452663, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.41522} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.136650] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1287.136650] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1287.136650] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1287.136650] env[62383]: INFO nova.compute.manager [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Took 1.06 seconds to destroy the instance on the hypervisor. [ 1287.136650] env[62383]: DEBUG oslo.service.loopingcall [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1287.136888] env[62383]: DEBUG nova.compute.manager [-] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1287.136888] env[62383]: DEBUG nova.network.neutron [-] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1287.152624] env[62383]: DEBUG nova.network.neutron [-] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1287.630728] env[62383]: DEBUG oslo_concurrency.lockutils [None req-998bea7d-2b5c-4d94-83cc-1ff1225526f2 tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Lock "e2188b8a-4982-49ea-945c-3e0a3437ae81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.128s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.655278] env[62383]: DEBUG nova.network.neutron [-] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.749822] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquiring lock "e2188b8a-4982-49ea-945c-3e0a3437ae81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.749822] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Lock "e2188b8a-4982-49ea-945c-3e0a3437ae81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.749822] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquiring lock "e2188b8a-4982-49ea-945c-3e0a3437ae81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.749822] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Lock "e2188b8a-4982-49ea-945c-3e0a3437ae81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.749822] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 
tempest-InstanceActionsV221TestJSON-588858376-project-member] Lock "e2188b8a-4982-49ea-945c-3e0a3437ae81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.751027] env[62383]: INFO nova.compute.manager [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Terminating instance [ 1288.158326] env[62383]: INFO nova.compute.manager [-] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Took 1.02 seconds to deallocate network for instance. [ 1288.254558] env[62383]: DEBUG nova.compute.manager [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1288.254838] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1288.255793] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1750a10e-3781-46ae-a410-45b748a0c614 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.264950] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1288.265538] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb101645-03e0-4d43-bb19-85d89de8177d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.271913] env[62383]: DEBUG oslo_vmware.api [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for the task: (returnval){ [ 1288.271913] env[62383]: value = "task-2452664" [ 1288.271913] env[62383]: _type = "Task" [ 1288.271913] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.279972] env[62383]: DEBUG oslo_vmware.api [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452664, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.665154] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.665501] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.665717] env[62383]: DEBUG nova.objects.instance [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lazy-loading 'resources' on Instance uuid a743070f-cdda-4bf3-a15a-8199e9d57e3c {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1288.782113] env[62383]: DEBUG oslo_vmware.api [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452664, 'name': PowerOffVM_Task, 'duration_secs': 0.185914} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.782383] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1288.782554] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1288.782793] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3a9b2a7-ea1b-4bd3-a528-c5ba0ac42322 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.846243] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1288.846513] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1288.846707] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Deleting the datastore file [datastore2] e2188b8a-4982-49ea-945c-3e0a3437ae81 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1288.846969] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5b8ada6-8412-46ec-9477-5f973c4713f8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.854513] env[62383]: DEBUG oslo_vmware.api [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for the task: (returnval){ [ 1288.854513] env[62383]: value = "task-2452666" [ 1288.854513] env[62383]: _type = "Task" [ 1288.854513] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.862870] env[62383]: DEBUG oslo_vmware.api [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452666, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.250815] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838a1ed5-7e8c-4b65-8be4-abc482adba86 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.258804] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cd8ac3-c201-4edc-bd02-ba9c731e5586 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.290449] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140eead0-27fd-4db9-bd9d-90e36524d813 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.298064] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49eae196-3e41-4fe0-a253-7843fe895648 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.311667] env[62383]: DEBUG nova.compute.provider_tree [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1289.365912] env[62383]: DEBUG oslo_vmware.api [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Task: {'id': task-2452666, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141767} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.366232] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1289.366437] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1289.366642] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1289.366821] env[62383]: INFO nova.compute.manager [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1289.367122] env[62383]: DEBUG oslo.service.loopingcall [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1289.367293] env[62383]: DEBUG nova.compute.manager [-] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1289.367388] env[62383]: DEBUG nova.network.neutron [-] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1289.640588] env[62383]: DEBUG nova.compute.manager [req-2df1ebdf-0e6a-463a-937a-9b98fc0c98b4 req-2a1569ce-231b-4791-a8d4-62b73e158e01 service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Received event network-vif-deleted-87022a62-e164-4fdc-8bdf-a41eb2391625 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1289.640891] env[62383]: INFO nova.compute.manager [req-2df1ebdf-0e6a-463a-937a-9b98fc0c98b4 req-2a1569ce-231b-4791-a8d4-62b73e158e01 service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Neutron deleted interface 87022a62-e164-4fdc-8bdf-a41eb2391625; detaching it from the instance and deleting it from the info cache [ 1289.640976] env[62383]: DEBUG nova.network.neutron [req-2df1ebdf-0e6a-463a-937a-9b98fc0c98b4 req-2a1569ce-231b-4791-a8d4-62b73e158e01 service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.816578] env[62383]: DEBUG nova.scheduler.client.report [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1290.124576] env[62383]: DEBUG nova.network.neutron [-] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1290.144672] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7b8b861-5b78-435a-9fd7-46ca15d90216 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.155693] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670f4999-77a7-4cf5-bb8b-4d0c8ec803e3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.182363] env[62383]: DEBUG nova.compute.manager [req-2df1ebdf-0e6a-463a-937a-9b98fc0c98b4 req-2a1569ce-231b-4791-a8d4-62b73e158e01 service nova] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Detach interface failed, port_id=87022a62-e164-4fdc-8bdf-a41eb2391625, reason: Instance e2188b8a-4982-49ea-945c-3e0a3437ae81 could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1290.321658] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.656s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.344963] env[62383]: INFO nova.scheduler.client.report [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Deleted allocations for instance a743070f-cdda-4bf3-a15a-8199e9d57e3c [ 1290.627480] env[62383]: INFO nova.compute.manager [-] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Took 1.26 seconds to deallocate network for instance. [ 1290.852876] env[62383]: DEBUG oslo_concurrency.lockutils [None req-58f72e6f-386a-4189-a941-efa14d0f9b99 tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "a743070f-cdda-4bf3-a15a-8199e9d57e3c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.362s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.134017] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.134305] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.134529] env[62383]: DEBUG nova.objects.instance [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Lazy-loading 'resources' on Instance uuid e2188b8a-4982-49ea-945c-3e0a3437ae81 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1291.298145] env[62383]: DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "ebc0fa37-eb1a-421c-a421-cd990c6b84dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.298427] env[62383]: DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "ebc0fa37-eb1a-421c-a421-cd990c6b84dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.298640] env[62383]: 
DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "ebc0fa37-eb1a-421c-a421-cd990c6b84dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.298819] env[62383]: DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "ebc0fa37-eb1a-421c-a421-cd990c6b84dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.299011] env[62383]: DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "ebc0fa37-eb1a-421c-a421-cd990c6b84dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1291.301044] env[62383]: INFO nova.compute.manager [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Terminating instance [ 1291.696079] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fb0a2a-151f-4fff-8f6b-2624eedd109f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.703753] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1596b90-83e1-4c50-9ce4-a3e095558968 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.732683] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef98f84-6d81-4156-9f56-eb7f1906cf6b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.739412] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0da0e59-d924-46d0-9ba9-10c103f8a8ad {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.752196] env[62383]: DEBUG nova.compute.provider_tree [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1291.804559] env[62383]: DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "refresh_cache-ebc0fa37-eb1a-421c-a421-cd990c6b84dc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1291.804746] env[62383]: DEBUG oslo_concurrency.lockutils [None 
req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquired lock "refresh_cache-ebc0fa37-eb1a-421c-a421-cd990c6b84dc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.804925] env[62383]: DEBUG nova.network.neutron [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1292.257339] env[62383]: DEBUG nova.scheduler.client.report [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1292.321880] env[62383]: DEBUG nova.network.neutron [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1292.374838] env[62383]: DEBUG nova.network.neutron [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.762607] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.628s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.783225] env[62383]: INFO nova.scheduler.client.report [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Deleted allocations for instance e2188b8a-4982-49ea-945c-3e0a3437ae81 [ 1292.877301] env[62383]: DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Releasing lock "refresh_cache-ebc0fa37-eb1a-421c-a421-cd990c6b84dc" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1292.877745] env[62383]: DEBUG nova.compute.manager [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Start destroying the instance 
on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1292.877937] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1292.878821] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c432ade9-6976-4988-80ee-7ab05fc9ab44 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.886740] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1292.886960] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12998c67-7243-4665-b2c1-be59f18cf46e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.893557] env[62383]: DEBUG oslo_vmware.api [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1292.893557] env[62383]: value = "task-2452667" [ 1292.893557] env[62383]: _type = "Task" [ 1292.893557] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.900748] env[62383]: DEBUG oslo_vmware.api [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452667, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.290930] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5e7738bf-bbe7-43f8-9281-2aae4cfd2aff tempest-InstanceActionsV221TestJSON-588858376 tempest-InstanceActionsV221TestJSON-588858376-project-member] Lock "e2188b8a-4982-49ea-945c-3e0a3437ae81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.543s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1293.403955] env[62383]: DEBUG oslo_vmware.api [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452667, 'name': PowerOffVM_Task, 'duration_secs': 0.122203} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.404241] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1293.404413] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1293.404658] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d29516a2-9fdc-4b3d-b62b-5ea85879e9d5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.429601] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1293.429800] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1293.429980] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Deleting the datastore file [datastore2] ebc0fa37-eb1a-421c-a421-cd990c6b84dc {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1293.430545] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c568eb2d-c038-4391-8641-31e184b20337 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.436816] env[62383]: DEBUG oslo_vmware.api [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for the task: (returnval){ [ 1293.436816] env[62383]: value = "task-2452669" [ 1293.436816] env[62383]: _type = "Task" [ 1293.436816] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.443956] env[62383]: DEBUG oslo_vmware.api [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452669, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.946814] env[62383]: DEBUG oslo_vmware.api [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Task: {'id': task-2452669, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093219} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.947075] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1293.947266] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1293.947446] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1293.947620] env[62383]: INFO nova.compute.manager [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1293.947963] env[62383]: DEBUG oslo.service.loopingcall [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1293.948091] env[62383]: DEBUG nova.compute.manager [-] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1293.948184] env[62383]: DEBUG nova.network.neutron [-] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1293.963767] env[62383]: DEBUG nova.network.neutron [-] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Instance cache missing network info. {{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1294.466813] env[62383]: DEBUG nova.network.neutron [-] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.969577] env[62383]: INFO nova.compute.manager [-] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Took 1.02 seconds to deallocate network for instance. 
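The provider inventory dicts reported in this trace for provider 60615f54-0557-436e-a486-87505bffb4c7 are what Placement turns into schedulable capacity using the standard formula capacity = (total - reserved) * allocation_ratio. The following minimal Python sketch reuses the values from the logged report and assumes only that formula; it is an illustration of the arithmetic, not Nova or Placement source code:

    # Illustrative only: effective capacity Placement derives from the inventory
    # record logged for provider 60615f54-0557-436e-a486-87505bffb4c7, using
    # capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Schedulable capacity per resource class.
        return {rc: int((f['total'] - f['reserved']) * f['allocation_ratio'])
                for rc, f in inv.items()}

    print(effective_capacity(inventory))
    # -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

This is why the resource tracker's "Total usable vcpus: 48" view later in the trace coexists with Placement capacity of 192 VCPU (allocation_ratio 4.0); min_unit, max_unit and step_size additionally bound what any single allocation may request.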
[ 1295.140020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "b8e512cd-5eb9-423c-9447-833e34909bc3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.140020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.140020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.140020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.140020] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.140497] env[62383]: INFO nova.compute.manager [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Terminating instance [ 1295.476034] env[62383]: DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.476313] env[62383]: DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.476516] env[62383]: DEBUG nova.objects.instance [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb 
tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lazy-loading 'resources' on Instance uuid ebc0fa37-eb1a-421c-a421-cd990c6b84dc {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1295.644620] env[62383]: DEBUG nova.compute.manager [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1295.644865] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1295.645842] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a563fa07-44c7-4732-b1b4-fd98254f8d58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.654174] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1295.654412] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-310fcf01-bc62-46aa-82e4-9fb4e4376017 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.661446] env[62383]: DEBUG oslo_vmware.api [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1295.661446] env[62383]: value = "task-2452670" [ 1295.661446] env[62383]: _type = "Task" [ 1295.661446] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.670414] env[62383]: DEBUG oslo_vmware.api [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452670, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.032517] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a09ff26-3937-4b37-a12b-47c40903a1c8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.039946] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c336ca6d-8e85-4a58-9853-3dc8052e5c83 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.069444] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132ea826-86dc-40c1-9792-d1ad7924c195 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.076138] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdbf1d7-aa0c-4daf-9e88-548243bb6ee4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.088657] env[62383]: DEBUG nova.compute.provider_tree [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1296.171040] env[62383]: DEBUG oslo_vmware.api [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452670, 'name': PowerOffVM_Task, 'duration_secs': 0.199415} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.171352] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1296.171478] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1296.171722] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b59fe15c-d385-4ba6-a8d7-25ab93d53e1d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.239698] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1296.239915] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1296.240113] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleting the datastore file [datastore2] b8e512cd-5eb9-423c-9447-833e34909bc3 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1296.240391] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb9ff34a-36f7-4063-b3b3-eff71c5154f3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.246703] env[62383]: DEBUG oslo_vmware.api [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for the task: (returnval){ [ 1296.246703] env[62383]: value = "task-2452672" [ 1296.246703] env[62383]: _type = "Task" [ 1296.246703] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.254736] env[62383]: DEBUG oslo_vmware.api [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452672, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.591809] env[62383]: DEBUG nova.scheduler.client.report [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1296.756602] env[62383]: DEBUG oslo_vmware.api [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Task: {'id': task-2452672, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164308} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.756850] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1296.757022] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1296.757208] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1296.757381] env[62383]: INFO nova.compute.manager [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1296.757623] env[62383]: DEBUG oslo.service.loopingcall [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1296.757815] env[62383]: DEBUG nova.compute.manager [-] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1296.757906] env[62383]: DEBUG nova.network.neutron [-] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1297.096390] env[62383]: DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.620s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.115045] env[62383]: INFO nova.scheduler.client.report [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Deleted allocations for instance ebc0fa37-eb1a-421c-a421-cd990c6b84dc [ 1297.191931] env[62383]: DEBUG nova.compute.manager [req-626c506b-b838-41d1-910a-81c4bf6f7c32 req-82f9d5b2-18b6-440d-9d2e-640f90e7c09d service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Received event network-vif-deleted-f1d1962c-c0f2-4e5b-9885-b71019f7e792 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1297.192164] env[62383]: INFO nova.compute.manager [req-626c506b-b838-41d1-910a-81c4bf6f7c32 req-82f9d5b2-18b6-440d-9d2e-640f90e7c09d service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Neutron deleted interface f1d1962c-c0f2-4e5b-9885-b71019f7e792; detaching it from the instance and deleting it from the info cache [ 1297.192343] env[62383]: DEBUG nova.network.neutron [req-626c506b-b838-41d1-910a-81c4bf6f7c32 req-82f9d5b2-18b6-440d-9d2e-640f90e7c09d service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.624076] env[62383]: DEBUG oslo_concurrency.lockutils [None req-629fae7d-0fd5-4cd6-9e2e-114fb6c916eb tempest-ServerShowV247Test-1346384455 tempest-ServerShowV247Test-1346384455-project-member] Lock "ebc0fa37-eb1a-421c-a421-cd990c6b84dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.326s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.675659] env[62383]: DEBUG nova.network.neutron [-] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.694939] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f4423713-32d9-407a-9865-6d674c090dd7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.705531] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606c8875-8280-4ebc-9dd7-bd5aec040496 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.730520] env[62383]: DEBUG 
nova.compute.manager [req-626c506b-b838-41d1-910a-81c4bf6f7c32 req-82f9d5b2-18b6-440d-9d2e-640f90e7c09d service nova] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Detach interface failed, port_id=f1d1962c-c0f2-4e5b-9885-b71019f7e792, reason: Instance b8e512cd-5eb9-423c-9447-833e34909bc3 could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1298.178561] env[62383]: INFO nova.compute.manager [-] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Took 1.42 seconds to deallocate network for instance. [ 1298.685637] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.686050] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.686193] env[62383]: DEBUG nova.objects.instance [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lazy-loading 'resources' on Instance uuid b8e512cd-5eb9-423c-9447-833e34909bc3 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1299.231549] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77184eda-67b5-4d32-8bb1-3eb00821a255 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.239013] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fde22b-e418-4ae5-9f17-5c5b08bb4ac4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.269938] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b9546e-90d1-4ab8-8883-5089b6c26000 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.277021] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcfd975-bbdf-4e14-8bf3-751cd00074eb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.289871] env[62383]: DEBUG nova.compute.provider_tree [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.793406] env[62383]: DEBUG nova.scheduler.client.report [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Inventory has not changed for provider 
60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1300.298632] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.613s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.315766] env[62383]: INFO nova.scheduler.client.report [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Deleted allocations for instance b8e512cd-5eb9-423c-9447-833e34909bc3 [ 1300.823031] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dbf3d97f-e43c-40dd-b313-5c3d939508bc tempest-AttachVolumeShelveTestJSON-203390778 tempest-AttachVolumeShelveTestJSON-203390778-project-member] Lock "b8e512cd-5eb9-423c-9447-833e34909bc3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.685s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.753652] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.753874] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.256887] env[62383]: DEBUG nova.compute.utils [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1302.759970] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1303.817965] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 
tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.818360] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.818484] env[62383]: INFO nova.compute.manager [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Attaching volume 0f861812-68ca-445e-ae14-41fe457cf6dc to /dev/sdb [ 1303.847483] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847b61d0-e998-4611-a39c-1d73ad12e3e8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.855784] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8c3641-33c1-4c11-8320-9a8954b25700 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.869481] env[62383]: DEBUG nova.virt.block_device [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updating existing volume attachment record: 8de65a5b-8828-4c26-8895-e6cde6d65bf4 {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1308.413020] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Volume attach. 
Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1308.413306] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496639', 'volume_id': '0f861812-68ca-445e-ae14-41fe457cf6dc', 'name': 'volume-0f861812-68ca-445e-ae14-41fe457cf6dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e05e6a48-6992-41f2-a937-2f8e022cf1f5', 'attached_at': '', 'detached_at': '', 'volume_id': '0f861812-68ca-445e-ae14-41fe457cf6dc', 'serial': '0f861812-68ca-445e-ae14-41fe457cf6dc'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1308.414252] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db201f9-9ff9-49d0-a87e-e4f2ea01d195 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.431173] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70810b6c-230e-49b2-81f9-685fde4fc975 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.455188] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] volume-0f861812-68ca-445e-ae14-41fe457cf6dc/volume-0f861812-68ca-445e-ae14-41fe457cf6dc.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1308.455430] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9acbc3a7-3651-4e46-a4c7-4b1e61b07f2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.472854] env[62383]: DEBUG oslo_vmware.api [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1308.472854] env[62383]: value = "task-2452678" [ 1308.472854] env[62383]: _type = "Task" [ 1308.472854] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.480445] env[62383]: DEBUG oslo_vmware.api [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452678, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.982203] env[62383]: DEBUG oslo_vmware.api [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452678, 'name': ReconfigVM_Task, 'duration_secs': 0.307997} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.982599] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Reconfigured VM instance instance-0000007a to attach disk [datastore2] volume-0f861812-68ca-445e-ae14-41fe457cf6dc/volume-0f861812-68ca-445e-ae14-41fe457cf6dc.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1308.987387] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8445d32-12a5-46a8-97c2-16a9142311bb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.001593] env[62383]: DEBUG oslo_vmware.api [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1309.001593] env[62383]: value = "task-2452679" [ 1309.001593] env[62383]: _type = "Task" [ 1309.001593] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.008974] env[62383]: DEBUG oslo_vmware.api [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452679, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.510737] env[62383]: DEBUG oslo_vmware.api [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452679, 'name': ReconfigVM_Task, 'duration_secs': 0.132954} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.511153] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496639', 'volume_id': '0f861812-68ca-445e-ae14-41fe457cf6dc', 'name': 'volume-0f861812-68ca-445e-ae14-41fe457cf6dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e05e6a48-6992-41f2-a937-2f8e022cf1f5', 'attached_at': '', 'detached_at': '', 'volume_id': '0f861812-68ca-445e-ae14-41fe457cf6dc', 'serial': '0f861812-68ca-445e-ae14-41fe457cf6dc'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1310.543882] env[62383]: DEBUG nova.objects.instance [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'flavor' on Instance uuid e05e6a48-6992-41f2-a937-2f8e022cf1f5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1311.049891] env[62383]: DEBUG oslo_concurrency.lockutils [None req-cc997121-3559-4d31-9ae6-2f0aeb06dddd tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.231s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.174497] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.174743] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.174932] env[62383]: DEBUG nova.compute.manager [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1311.175870] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d512851c-a6e2-48e6-b998-3a466ee42ecb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.182649] env[62383]: DEBUG nova.compute.manager [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Stopping instance; current 
vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1311.183197] env[62383]: DEBUG nova.objects.instance [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'flavor' on Instance uuid e05e6a48-6992-41f2-a937-2f8e022cf1f5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1312.190518] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1312.191257] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c00917a5-b547-411b-a402-f806c05167ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.198721] env[62383]: DEBUG oslo_vmware.api [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1312.198721] env[62383]: value = "task-2452680" [ 1312.198721] env[62383]: _type = "Task" [ 1312.198721] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.206124] env[62383]: DEBUG oslo_vmware.api [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452680, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.708364] env[62383]: DEBUG oslo_vmware.api [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452680, 'name': PowerOffVM_Task, 'duration_secs': 0.183067} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.708620] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1312.708814] env[62383]: DEBUG nova.compute.manager [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1312.709580] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838f1798-9cd5-46cc-b55e-614a947008b8 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.220262] env[62383]: DEBUG oslo_concurrency.lockutils [None req-99fe859f-a904-4360-ab36-2864f60d2f71 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.045s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1313.531437] env[62383]: DEBUG nova.objects.instance [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'flavor' on Instance uuid e05e6a48-6992-41f2-a937-2f8e022cf1f5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1314.036669] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.036937] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquired lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.037106] env[62383]: DEBUG nova.network.neutron [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1314.037293] env[62383]: DEBUG nova.objects.instance [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'info_cache' on Instance uuid e05e6a48-6992-41f2-a937-2f8e022cf1f5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1314.540935] env[62383]: DEBUG nova.objects.base [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Object 
Instance lazy-loaded attributes: flavor,info_cache {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1315.232120] env[62383]: DEBUG nova.network.neutron [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updating instance_info_cache with network_info: [{"id": "7cbb97fd-c997-46f0-8174-980bf3cec122", "address": "fa:16:3e:47:0a:0f", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbb97fd-c9", "ovs_interfaceid": "7cbb97fd-c997-46f0-8174-980bf3cec122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.735587] env[62383]: DEBUG oslo_concurrency.lockutils [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Releasing lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.742156] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1316.742515] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec50245f-b2ac-4cbc-acc5-0b569e8f497c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.750620] env[62383]: DEBUG oslo_vmware.api [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1316.750620] env[62383]: value = "task-2452681" [ 1316.750620] env[62383]: _type = "Task" [ 1316.750620] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.757989] env[62383]: DEBUG oslo_vmware.api [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452681, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.260780] env[62383]: DEBUG oslo_vmware.api [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452681, 'name': PowerOnVM_Task, 'duration_secs': 0.387376} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.261054] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1317.261281] env[62383]: DEBUG nova.compute.manager [None req-dee7eaa2-81be-47b7-9875-bef8fb3b1b44 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1317.262039] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce441ae4-c377-4cfe-a58b-76673c395dcf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.240437] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1325.744092] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.744375] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.744514] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.744672] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1325.745586] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ad3752-1248-40b5-ad38-080282e7039b {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.754050] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-afcba7b8-201b-4d19-ab3e-4335ad7e02de {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.767775] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4a3145-6c79-4d49-b023-cc30e9dd8c1e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.773837] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e5132b-5b1f-4345-923d-3f88da3176db {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.803613] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180616MB free_disk=146GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1325.803758] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.803945] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.829061] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance e05e6a48-6992-41f2-a937-2f8e022cf1f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1326.829372] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1326.829489] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1326.856533] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e60f62-cf7d-421e-9322-7fd771ff3b05 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.864298] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69c49f1-8901-4b0c-ad99-0791b3d01ef2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.892996] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfacfe9-6a0a-48b3-9b99-f3e7df460c1f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.899550] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a062e74-cbc2-4502-a88f-4c256589fb2e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.911981] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.414742] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1327.920255] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1327.920718] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.116s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.915921] env[62383]: DEBUG oslo_service.periodic_task [None 
req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.916376] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.916376] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1331.948959] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.949295] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquired lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.949295] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Forcefully refreshing network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1333.162049] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updating instance_info_cache with network_info: [{"id": "7cbb97fd-c997-46f0-8174-980bf3cec122", "address": "fa:16:3e:47:0a:0f", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbb97fd-c9", "ovs_interfaceid": "7cbb97fd-c997-46f0-8174-980bf3cec122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.664781] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Releasing lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1333.664995] env[62383]: DEBUG nova.compute.manager 
[None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updated the network info_cache for instance {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1333.665217] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.665439] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.665603] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.665752] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.665892] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.666093] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.666250] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1335.985185] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1353.700581] env[62383]: DEBUG oslo_concurrency.lockutils [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.700966] env[62383]: DEBUG oslo_concurrency.lockutils [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.204117] env[62383]: INFO nova.compute.manager [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Detaching volume 0f861812-68ca-445e-ae14-41fe457cf6dc [ 1354.234129] env[62383]: INFO nova.virt.block_device [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Attempting to driver detach volume 0f861812-68ca-445e-ae14-41fe457cf6dc from mountpoint /dev/sdb [ 1354.234391] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Volume detach. 
Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1354.234583] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496639', 'volume_id': '0f861812-68ca-445e-ae14-41fe457cf6dc', 'name': 'volume-0f861812-68ca-445e-ae14-41fe457cf6dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e05e6a48-6992-41f2-a937-2f8e022cf1f5', 'attached_at': '', 'detached_at': '', 'volume_id': '0f861812-68ca-445e-ae14-41fe457cf6dc', 'serial': '0f861812-68ca-445e-ae14-41fe457cf6dc'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1354.235522] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79249424-b2a1-43e2-98fe-94592c21e5e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.257047] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282168f9-0447-4d69-9086-f6b107169ab7 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.263559] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20405183-80c8-467a-a04a-f27a664711fa {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.282962] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bc40bb-4ad9-4139-9d00-33703440d1f9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.296559] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] The volume has not been displaced from its original location: [datastore2] volume-0f861812-68ca-445e-ae14-41fe457cf6dc/volume-0f861812-68ca-445e-ae14-41fe457cf6dc.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1354.301694] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Reconfiguring VM instance instance-0000007a to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1354.301936] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdca2ee8-69ce-4b74-a4ef-a9ee5f2623fc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.320026] env[62383]: DEBUG oslo_vmware.api [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1354.320026] env[62383]: value = "task-2452682" [ 1354.320026] env[62383]: _type = "Task" [ 1354.320026] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.327159] env[62383]: DEBUG oslo_vmware.api [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452682, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.829221] env[62383]: DEBUG oslo_vmware.api [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452682, 'name': ReconfigVM_Task, 'duration_secs': 0.245898} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.829602] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Reconfigured VM instance instance-0000007a to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1354.834065] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7881a439-bc4d-49ad-9136-fb0f29bf2a0f {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.848430] env[62383]: DEBUG oslo_vmware.api [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1354.848430] env[62383]: value = "task-2452683" [ 1354.848430] env[62383]: _type = "Task" [ 1354.848430] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.855601] env[62383]: DEBUG oslo_vmware.api [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452683, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.358190] env[62383]: DEBUG oslo_vmware.api [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452683, 'name': ReconfigVM_Task, 'duration_secs': 0.123338} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.358501] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496639', 'volume_id': '0f861812-68ca-445e-ae14-41fe457cf6dc', 'name': 'volume-0f861812-68ca-445e-ae14-41fe457cf6dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e05e6a48-6992-41f2-a937-2f8e022cf1f5', 'attached_at': '', 'detached_at': '', 'volume_id': '0f861812-68ca-445e-ae14-41fe457cf6dc', 'serial': '0f861812-68ca-445e-ae14-41fe457cf6dc'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1355.896919] env[62383]: DEBUG nova.objects.instance [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'flavor' on Instance uuid e05e6a48-6992-41f2-a937-2f8e022cf1f5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1356.862956] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.905064] env[62383]: DEBUG oslo_concurrency.lockutils [None req-92a1797c-ea07-4188-b8ce-079cfc513d54 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.204s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1356.906163] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.043s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1356.906375] env[62383]: DEBUG nova.compute.manager [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1356.907269] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-100ee298-7731-4d73-acc4-f4e3d927dfdf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.914996] env[62383]: DEBUG nova.compute.manager [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62383) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1356.915613] env[62383]: DEBUG nova.objects.instance [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'flavor' on Instance uuid e05e6a48-6992-41f2-a937-2f8e022cf1f5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1357.922208] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1357.922604] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bcfb95d-955e-4c70-9673-2a307a2cf942 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.929603] env[62383]: DEBUG oslo_vmware.api [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1357.929603] env[62383]: value = "task-2452684" [ 1357.929603] env[62383]: _type = "Task" [ 1357.929603] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.938232] env[62383]: DEBUG oslo_vmware.api [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452684, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.439417] env[62383]: DEBUG oslo_vmware.api [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452684, 'name': PowerOffVM_Task, 'duration_secs': 0.21467} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.439693] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1358.439872] env[62383]: DEBUG nova.compute.manager [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1358.440625] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b712b250-bae2-4791-95fc-f20657a66e58 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.951867] env[62383]: DEBUG oslo_concurrency.lockutils [None req-7e1e6d07-2545-4963-85fd-22e8694e3b42 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.193950] env[62383]: DEBUG nova.objects.instance [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'flavor' on Instance uuid e05e6a48-6992-41f2-a937-2f8e022cf1f5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1359.699282] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.699483] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquired lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.699614] env[62383]: DEBUG nova.network.neutron [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1359.699789] env[62383]: DEBUG nova.objects.instance [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'info_cache' on Instance uuid e05e6a48-6992-41f2-a937-2f8e022cf1f5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1360.204064] env[62383]: DEBUG nova.objects.base [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Object 
Instance lazy-loaded attributes: flavor,info_cache {{(pid=62383) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1360.896882] env[62383]: DEBUG nova.network.neutron [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updating instance_info_cache with network_info: [{"id": "7cbb97fd-c997-46f0-8174-980bf3cec122", "address": "fa:16:3e:47:0a:0f", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cbb97fd-c9", "ovs_interfaceid": "7cbb97fd-c997-46f0-8174-980bf3cec122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.400228] env[62383]: DEBUG oslo_concurrency.lockutils [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Releasing lock "refresh_cache-e05e6a48-6992-41f2-a937-2f8e022cf1f5" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.406303] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1362.406643] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a923f2e9-135d-435c-9f98-42d16d00d483 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.413892] env[62383]: DEBUG oslo_vmware.api [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1362.413892] env[62383]: value = "task-2452685" [ 1362.413892] env[62383]: _type = "Task" [ 1362.413892] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.421593] env[62383]: DEBUG oslo_vmware.api [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452685, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.923884] env[62383]: DEBUG oslo_vmware.api [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452685, 'name': PowerOnVM_Task, 'duration_secs': 0.361354} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.924252] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.924516] env[62383]: DEBUG nova.compute.manager [None req-e13c9210-eb19-49f1-a4f4-2399ae7f159f tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1362.925564] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff03147d-e198-4216-8c82-74c76bdacb31 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.241942] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.745587] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.745829] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.746241] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.746558] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1385.747541] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d098f469-ae4a-46cb-b977-884f1b4a2663 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.756219] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-524110e7-c6ee-44a4-b344-af5a7bf0af0d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.773272] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9582be04-b0ff-4f7b-b063-4034e3da7989 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.779524] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46910c9-c62a-45f5-98bd-a36382c88b6d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.808048] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181011MB free_disk=146GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1385.808048] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.808048] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.832816] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance e05e6a48-6992-41f2-a937-2f8e022cf1f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1386.833100] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1386.833197] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1386.858544] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9429f320-3edf-41f0-b98f-83d78c121e50 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.865987] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8263f3-45a9-4775-8ff0-b049a57e62ac {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.895629] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb929bd3-ef00-4495-89f8-200100f985c5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.902336] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279551bf-dd0e-4abe-aee6-f1fd18e99988 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.914961] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.417790] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1387.419097] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1387.419281] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.611s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.412936] env[62383]: DEBUG oslo_service.periodic_task [None 
req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.413365] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.413365] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1391.916155] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Didn't find any instances for network info cache update. {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10531}} [ 1391.916382] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.916539] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.240037] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.240288] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.240466] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1392.240612] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1393.241047] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.742703] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.240368] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.240758] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Cleaning up deleted instances {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11745}} [ 1396.748723] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] There are 24 instances to clean {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11754}} [ 1396.748900] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: e2188b8a-4982-49ea-945c-3e0a3437ae81] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1397.251998] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a743070f-cdda-4bf3-a15a-8199e9d57e3c] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1397.755304] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: ebc0fa37-eb1a-421c-a421-cd990c6b84dc] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1398.258811] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8d8f91e1-2bf6-420d-8df3-f1e5f18b41a8] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1398.762168] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 02e2865c-ff68-44ac-abc6-839e399bbe7c] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1399.265710] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 991d6eb0-94e7-4d7c-bd85-3c8ef28daa49] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1399.314603] env[62383]: DEBUG oslo_concurrency.lockutils [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62383) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.314856] env[62383]: DEBUG oslo_concurrency.lockutils [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.315125] env[62383]: DEBUG oslo_concurrency.lockutils [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.315320] env[62383]: DEBUG oslo_concurrency.lockutils [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.315495] env[62383]: DEBUG oslo_concurrency.lockutils [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.318804] env[62383]: INFO nova.compute.manager [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Terminating instance [ 1399.768637] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: b8e512cd-5eb9-423c-9447-833e34909bc3] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1399.822478] env[62383]: DEBUG nova.compute.manager [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Start destroying the instance on the hypervisor. 
{{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1399.822715] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1399.823839] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65884784-83c8-4e34-a5ee-6220e8f35a50 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.831763] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1399.831988] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31fc0385-d51c-4a07-ac12-4a08f7ca8a28 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.838163] env[62383]: DEBUG oslo_vmware.api [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1399.838163] env[62383]: value = "task-2452686" [ 1399.838163] env[62383]: _type = "Task" [ 1399.838163] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.845492] env[62383]: DEBUG oslo_vmware.api [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452686, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.271961] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: a4e3b5a2-98c2-4376-bafd-49ccee64b262] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1400.347976] env[62383]: DEBUG oslo_vmware.api [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452686, 'name': PowerOffVM_Task, 'duration_secs': 0.171452} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.347976] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1400.348203] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1400.348313] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cbead2b3-2691-4787-ac89-2e6c63662b88 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.409858] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1400.410078] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1400.410262] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Deleting the datastore file [datastore2] e05e6a48-6992-41f2-a937-2f8e022cf1f5 {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1400.410511] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72e9c5b7-d3a8-4b35-a8ce-8f89580bfea2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.416712] env[62383]: DEBUG oslo_vmware.api [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1400.416712] env[62383]: value = "task-2452688" [ 1400.416712] env[62383]: _type = "Task" [ 1400.416712] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.424353] env[62383]: DEBUG oslo_vmware.api [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452688, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.775508] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: b6ed40a8-674f-4179-8642-848ab0a2d31b] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1400.926511] env[62383]: DEBUG oslo_vmware.api [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143941} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.926691] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1400.926877] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1400.927096] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1400.927282] env[62383]: INFO nova.compute.manager [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1400.927532] env[62383]: DEBUG oslo.service.loopingcall [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1400.928034] env[62383]: DEBUG nova.compute.manager [-] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1400.928155] env[62383]: DEBUG nova.network.neutron [-] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1401.278554] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 12843fba-0240-44fb-9687-d34a6333011b] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1401.358623] env[62383]: DEBUG nova.compute.manager [req-fc7c5357-4b9b-485f-bbc1-f3be8f4f1553 req-e4b516e3-83ad-48c2-8b39-82312128b0fb service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Received event network-vif-deleted-7cbb97fd-c997-46f0-8174-980bf3cec122 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1401.358887] env[62383]: INFO nova.compute.manager [req-fc7c5357-4b9b-485f-bbc1-f3be8f4f1553 req-e4b516e3-83ad-48c2-8b39-82312128b0fb service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Neutron deleted interface 7cbb97fd-c997-46f0-8174-980bf3cec122; detaching it from the instance and deleting it from the info cache [ 1401.359050] env[62383]: DEBUG nova.network.neutron [req-fc7c5357-4b9b-485f-bbc1-f3be8f4f1553 req-e4b516e3-83ad-48c2-8b39-82312128b0fb service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.782090] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: ac4e173d-fec9-4a0f-b9b6-ad83a98989e7] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1401.843705] env[62383]: DEBUG nova.network.neutron [-] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.861303] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca1b6662-e73f-4cb8-be5f-2e6028709f72 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.871637] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db6dd59-7bea-4ccb-9f36-797c2e55d3f4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.895600] env[62383]: DEBUG nova.compute.manager [req-fc7c5357-4b9b-485f-bbc1-f3be8f4f1553 req-e4b516e3-83ad-48c2-8b39-82312128b0fb service nova] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Detach interface failed, port_id=7cbb97fd-c997-46f0-8174-980bf3cec122, reason: Instance e05e6a48-6992-41f2-a937-2f8e022cf1f5 could not be found. 
{{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1402.285521] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 89033750-629f-4ddb-a309-56d50f798a8d] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1402.345309] env[62383]: INFO nova.compute.manager [-] [instance: e05e6a48-6992-41f2-a937-2f8e022cf1f5] Took 1.42 seconds to deallocate network for instance. [ 1402.789576] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: f7584d2c-5add-4764-9aed-22f7d1674854] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1402.851277] env[62383]: DEBUG oslo_concurrency.lockutils [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.851553] env[62383]: DEBUG oslo_concurrency.lockutils [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.851788] env[62383]: DEBUG nova.objects.instance [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'resources' on Instance uuid e05e6a48-6992-41f2-a937-2f8e022cf1f5 {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1403.292522] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 687912b8-40d2-4243-b31c-06107aa6cfb9] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1403.524238] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40dd60b-f274-4be7-8960-3904fd0eca00 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.531734] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535c277a-311c-4868-bddc-f660f1282607 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.562133] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31f4bf2-1eea-46b4-b1d7-047aa302b6af {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.568986] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a816683-e27e-4dab-b3e2-881ef52993e9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.581654] env[62383]: DEBUG nova.compute.provider_tree [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 
tempest-AttachVolumeTestJSON-1109327635-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1403.796090] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: c94e9a83-04de-4144-ab6e-d96dc7c39e6d] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1404.085261] env[62383]: DEBUG nova.scheduler.client.report [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1404.299783] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 7cece477-9444-4ffd-88a0-d6c821cb7275] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1404.590246] env[62383]: DEBUG oslo_concurrency.lockutils [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.738s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.611600] env[62383]: INFO nova.scheduler.client.report [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Deleted allocations for instance e05e6a48-6992-41f2-a937-2f8e022cf1f5 [ 1404.802809] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 91ce6946-0c8a-4b59-bbe0-54a566a57cdb] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1405.118980] env[62383]: DEBUG oslo_concurrency.lockutils [None req-17bff503-8879-407d-aacb-a4f6a957eff1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "e05e6a48-6992-41f2-a937-2f8e022cf1f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.804s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.306321] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: de24aca8-30fc-453e-b192-b6bb115876ef] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1405.809470] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 0392d059-57ea-49fb-84d2-b71cbca840db] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11758}} [ 1406.312717] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 8d9d6f3b-aef7-478a-a43e-3b621f1b3845] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1406.816179] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: ec7c648d-10b0-480a-a5f0-4dab08d0049e] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1407.319728] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 9f8e346e-815c-492d-84a9-00ebdca3bcc3] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1407.400179] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "d2465e54-944f-48a5-9174-4372353870ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.400422] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.822776] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 1b025655-acad-4b70-9e1a-489683cafb7e] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1407.903628] env[62383]: DEBUG nova.compute.manager [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Starting instance... 
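The repeated "Acquiring lock ... by ..." / "Lock ... acquired ... waited 0.000s" / "Lock ... released ... held Ns" triplets throughout this log come from oslo_concurrency.lockutils, which wraps the guarded call and times how long the lock was waited for and held. A minimal sketch of the two usual entry points follows; the lock names and function bodies are illustrative only.

    from oslo_concurrency import lockutils

    # Decorator form: every call to this function is serialized on the named
    # lock, producing acquired/released DEBUG lines like the ones above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section

    # Context-manager form, useful when only part of a function needs the lock,
    # e.g. a per-instance lock keyed on the instance UUID.
    def build_instance(instance_uuid):
        with lockutils.lock(instance_uuid):
            pass  # critical section

    update_usage()
    build_instance('d2465e54-944f-48a5-9174-4372353870ca')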
{{(pid=62383) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1408.326235] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: 2f028680-8db4-474a-8f24-880c4702877b] Instance has had 0 of 5 cleanup attempts {{(pid=62383) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11758}} [ 1408.421483] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.421797] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.423270] env[62383]: INFO nova.compute.claims [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1408.829331] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.829515] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Cleaning up deleted instances with incomplete migration {{(pid=62383) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11783}} [ 1409.457677] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45df9e7-049d-4306-b9e9-43ff3af5e8cb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.465448] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3ae66e-4518-4f49-b2d5-8ac4fcc317b9 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.496918] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad3d9b4-e470-45b5-a51c-8ede1209ba15 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.504704] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced55581-1ba9-405a-8988-a1b40d30fbff {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.518645] env[62383]: DEBUG nova.compute.provider_tree [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1410.022891] env[62383]: DEBUG nova.scheduler.client.report [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1410.530094] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.108s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.530643] env[62383]: DEBUG nova.compute.manager [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Start building networks asynchronously for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1411.035702] env[62383]: DEBUG nova.compute.utils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1411.037167] env[62383]: DEBUG nova.compute.manager [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Allocating IP information in the background. 
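The inventory dictionaries reported above for provider 60615f54-0557-436e-a486-87505bffb4c7 are what bound the scheduler's placements on this node: for each resource class, Placement treats the usable capacity as (total - reserved) * allocation_ratio. A short worked computation on the exact numbers from the log:

    # Inventory as reported for provider 60615f54-0557-436e-a486-87505bffb4c7.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0        -> 48 cores overcommitted 4x
    # MEMORY_MB 196078.0
    # DISK_GB 400.0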
{{(pid=62383) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1411.037347] env[62383]: DEBUG nova.network.neutron [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] allocate_for_instance() {{(pid=62383) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1411.083472] env[62383]: DEBUG nova.policy [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7e092981766f4f63adaa0cbbb78fff9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0f48fbe0a7c49cf866e39daf3b5cf3c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62383) authorize /opt/stack/nova/nova/policy.py:192}} [ 1411.351814] env[62383]: DEBUG nova.network.neutron [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Successfully created port: 72d9470f-9ba0-4da1-8a01-2228345eaaa0 {{(pid=62383) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1411.540533] env[62383]: DEBUG nova.compute.manager [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Start building block device mappings for instance. {{(pid=62383) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1412.552043] env[62383]: DEBUG nova.compute.manager [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Start spawning the instance on the hypervisor. 
{{(pid=62383) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1412.577548] env[62383]: DEBUG nova.virt.hardware [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-02-11T15:22:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-02-11T15:22:16Z,direct_url=,disk_format='vmdk',id=cac3b430-a1d5-4ad1-92ec-34c2261779a8,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4c10acdaa3604265b23d83059e3de218',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-02-11T15:22:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1412.577808] env[62383]: DEBUG nova.virt.hardware [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Flavor limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.577966] env[62383]: DEBUG nova.virt.hardware [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Image limits 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1412.578165] env[62383]: DEBUG nova.virt.hardware [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Flavor pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.578314] env[62383]: DEBUG nova.virt.hardware [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Image pref 0:0:0 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1412.578460] env[62383]: DEBUG nova.virt.hardware [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62383) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1412.578672] env[62383]: DEBUG nova.virt.hardware [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1412.578833] env[62383]: DEBUG nova.virt.hardware [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1412.579005] env[62383]: DEBUG nova.virt.hardware [None 
req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Got 1 possible topologies {{(pid=62383) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1412.579179] env[62383]: DEBUG nova.virt.hardware [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1412.579354] env[62383]: DEBUG nova.virt.hardware [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62383) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1412.580224] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c8a890-0d58-4a6f-84dd-6ed457f55572 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.587774] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a155a08-0e69-4ae0-9b2d-45bb89fa41c2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.886283] env[62383]: DEBUG nova.compute.manager [req-2caec9c7-1149-43a3-ba53-8325bff8ddf4 req-27286cf3-402e-48ae-bdff-2aaefed13fec service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Received event network-vif-plugged-72d9470f-9ba0-4da1-8a01-2228345eaaa0 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1412.887890] env[62383]: DEBUG oslo_concurrency.lockutils [req-2caec9c7-1149-43a3-ba53-8325bff8ddf4 req-27286cf3-402e-48ae-bdff-2aaefed13fec service nova] Acquiring lock "d2465e54-944f-48a5-9174-4372353870ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.887890] env[62383]: DEBUG oslo_concurrency.lockutils [req-2caec9c7-1149-43a3-ba53-8325bff8ddf4 req-27286cf3-402e-48ae-bdff-2aaefed13fec service nova] Lock "d2465e54-944f-48a5-9174-4372353870ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.887890] env[62383]: DEBUG oslo_concurrency.lockutils [req-2caec9c7-1149-43a3-ba53-8325bff8ddf4 req-27286cf3-402e-48ae-bdff-2aaefed13fec service nova] Lock "d2465e54-944f-48a5-9174-4372353870ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.887890] env[62383]: DEBUG nova.compute.manager [req-2caec9c7-1149-43a3-ba53-8325bff8ddf4 req-27286cf3-402e-48ae-bdff-2aaefed13fec service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] No waiting events found dispatching network-vif-plugged-72d9470f-9ba0-4da1-8a01-2228345eaaa0 {{(pid=62383) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1412.887890] env[62383]: WARNING nova.compute.manager [req-2caec9c7-1149-43a3-ba53-8325bff8ddf4 
req-27286cf3-402e-48ae-bdff-2aaefed13fec service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Received unexpected event network-vif-plugged-72d9470f-9ba0-4da1-8a01-2228345eaaa0 for instance with vm_state building and task_state spawning. [ 1412.964705] env[62383]: DEBUG nova.network.neutron [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Successfully updated port: 72d9470f-9ba0-4da1-8a01-2228345eaaa0 {{(pid=62383) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1413.469455] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.469623] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquired lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.469812] env[62383]: DEBUG nova.network.neutron [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Building network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1414.000319] env[62383]: DEBUG nova.network.neutron [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Instance cache missing network info. 
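The nova.virt.hardware records a little further up walk through CPU-topology selection for the one-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536 sockets/cores/threads, exactly one topology (1 socket, 1 core, 1 thread) satisfies one vCPU, and it is chosen. The sketch below is a simplified illustration of that enumeration, not a copy of the real _get_possible_cpu_topologies code.

    from itertools import product

    def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Enumerate (sockets, cores, threads) whose product equals vcpus."""
        topologies = []
        # No single factor can exceed the vCPU count, so cap the search space.
        for s, c, t in product(range(1, min(max_sockets, vcpus) + 1),
                               range(1, min(max_cores, vcpus) + 1),
                               range(1, min(max_threads, vcpus) + 1)):
            if s * c * t == vcpus:
                topologies.append((s, c, t))
        return topologies

    # m1.nano from the log: 1 vCPU, default maxima 65536 -> only (1, 1, 1).
    print(possible_cpu_topologies(1, 65536, 65536, 65536))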
{{(pid=62383) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1414.113202] env[62383]: DEBUG nova.network.neutron [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updating instance_info_cache with network_info: [{"id": "72d9470f-9ba0-4da1-8a01-2228345eaaa0", "address": "fa:16:3e:f1:19:a9", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d9470f-9b", "ovs_interfaceid": "72d9470f-9ba0-4da1-8a01-2228345eaaa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.615542] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Releasing lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.615830] env[62383]: DEBUG nova.compute.manager [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Instance network_info: |[{"id": "72d9470f-9ba0-4da1-8a01-2228345eaaa0", "address": "fa:16:3e:f1:19:a9", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d9470f-9b", "ovs_interfaceid": "72d9470f-9ba0-4da1-8a01-2228345eaaa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62383) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1414.616290] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:19:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72d9470f-9ba0-4da1-8a01-2228345eaaa0', 'vif_model': 'vmxnet3'}] {{(pid=62383) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1414.623779] env[62383]: DEBUG oslo.service.loopingcall [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.623982] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2465e54-944f-48a5-9174-4372353870ca] Creating VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1414.624222] env[62383]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc4a33f1-46bf-4649-8c91-7cf45b719cab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.646702] env[62383]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1414.646702] env[62383]: value = "task-2452689" [ 1414.646702] env[62383]: _type = "Task" [ 1414.646702] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.653852] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452689, 'name': CreateVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.913016] env[62383]: DEBUG nova.compute.manager [req-56e17aff-99d5-47a4-ba5a-b351537d7d6f req-f4399d4b-ba57-4b30-b607-e601dc62f7c5 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Received event network-changed-72d9470f-9ba0-4da1-8a01-2228345eaaa0 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1414.913272] env[62383]: DEBUG nova.compute.manager [req-56e17aff-99d5-47a4-ba5a-b351537d7d6f req-f4399d4b-ba57-4b30-b607-e601dc62f7c5 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Refreshing instance network info cache due to event network-changed-72d9470f-9ba0-4da1-8a01-2228345eaaa0. 
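The instance_info_cache entries above carry the full network-info model for port 72d9470f-9ba0-4da1-8a01-2228345eaaa0: the VIF id, MAC address, the network with its subnets, and the fixed IP 192.168.128.3. A small sketch of pulling those fields back out of that structure (using the shape shown in the log, trimmed to the relevant keys) follows; network_info here is just a parsed list, not Nova's NetworkInfo object.

    # One VIF in the shape logged by update_instance_cache_with_nw_info (abridged).
    network_info = [{
        "id": "72d9470f-9ba0-4da1-8a01-2228345eaaa0",
        "address": "fa:16:3e:f1:19:a9",
        "devname": "tap72d9470f-9b",
        "type": "ovs",
        "network": {
            "label": "tempest-AttachVolumeTestJSON-2125079352-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.3", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        print(vif["id"], vif["address"], vif["devname"], fixed_ips)
    # 72d9470f-... fa:16:3e:f1:19:a9 tap72d9470f-9b ['192.168.128.3']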
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1414.913517] env[62383]: DEBUG oslo_concurrency.lockutils [req-56e17aff-99d5-47a4-ba5a-b351537d7d6f req-f4399d4b-ba57-4b30-b607-e601dc62f7c5 service nova] Acquiring lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.913685] env[62383]: DEBUG oslo_concurrency.lockutils [req-56e17aff-99d5-47a4-ba5a-b351537d7d6f req-f4399d4b-ba57-4b30-b607-e601dc62f7c5 service nova] Acquired lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.913880] env[62383]: DEBUG nova.network.neutron [req-56e17aff-99d5-47a4-ba5a-b351537d7d6f req-f4399d4b-ba57-4b30-b607-e601dc62f7c5 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Refreshing network info cache for port 72d9470f-9ba0-4da1-8a01-2228345eaaa0 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1415.156654] env[62383]: DEBUG oslo_vmware.api [-] Task: {'id': task-2452689, 'name': CreateVM_Task, 'duration_secs': 0.301363} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.157015] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2465e54-944f-48a5-9174-4372353870ca] Created VM on the ESX host {{(pid=62383) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1415.157505] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.157665] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.158000] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1415.158284] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd97f040-b23b-4cbe-9a1a-d33989d94415 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.162265] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1415.162265] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52a68460-6ecd-cb38-482a-e54128001c4d" [ 1415.162265] env[62383]: _type = "Task" [ 1415.162265] env[62383]: } to complete. 
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.169285] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a68460-6ecd-cb38-482a-e54128001c4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.588945] env[62383]: DEBUG nova.network.neutron [req-56e17aff-99d5-47a4-ba5a-b351537d7d6f req-f4399d4b-ba57-4b30-b607-e601dc62f7c5 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updated VIF entry in instance network info cache for port 72d9470f-9ba0-4da1-8a01-2228345eaaa0. {{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1415.589318] env[62383]: DEBUG nova.network.neutron [req-56e17aff-99d5-47a4-ba5a-b351537d7d6f req-f4399d4b-ba57-4b30-b607-e601dc62f7c5 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updating instance_info_cache with network_info: [{"id": "72d9470f-9ba0-4da1-8a01-2228345eaaa0", "address": "fa:16:3e:f1:19:a9", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d9470f-9b", "ovs_interfaceid": "72d9470f-9ba0-4da1-8a01-2228345eaaa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.672212] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52a68460-6ecd-cb38-482a-e54128001c4d, 'name': SearchDatastore_Task, 'duration_secs': 0.009841} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.672500] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.672742] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Processing image cac3b430-a1d5-4ad1-92ec-34c2261779a8 {{(pid=62383) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1415.672974] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.673139] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquired lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.673317] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1415.673562] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-896b90be-6ca4-4e2c-bb4e-f3a878229ade {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.682114] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62383) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1415.682250] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Folder [datastore2] devstack-image-cache_base created. 
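The records above ("Processing image cac3b430-...", the per-image lock on devstack-image-cache_base/cac3b430-..., MakeDirectory, then SearchDatastore_Task) show the datastore image cache being consulted before the disk copy: the cached vmdk is reused if present and fetched only on a miss, serialized on a per-image lock so concurrent spawns do not download the same image twice. The sketch below illustrates that check-then-fetch pattern on a local filesystem; the paths and fetch_from_glance callable are placeholders, not the driver's datastore operations.

    import os
    from oslo_concurrency import lockutils

    CACHE_DIR = "devstack-image-cache_base"

    def ensure_cached_image(datastore_root, image_id, fetch_from_glance):
        """Reuse a cached image vmdk, downloading it only on a cache miss."""
        cached_vmdk = os.path.join(datastore_root, CACHE_DIR, image_id,
                                   image_id + ".vmdk")
        # One lock per cached image, similar to the lock names in the log,
        # so parallel spawns of the same image serialize on the download.
        with lockutils.lock("%s/%s" % (CACHE_DIR, image_id)):
            if not os.path.exists(cached_vmdk):
                os.makedirs(os.path.dirname(cached_vmdk), exist_ok=True)
                fetch_from_glance(image_id, cached_vmdk)
        return cached_vmdk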
{{(pid=62383) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1415.682921] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79935d2d-dbe3-4b68-be5e-ba421bb88a00 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.687786] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1415.687786] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52736b9b-c362-43ad-ee84-521441eff874" [ 1415.687786] env[62383]: _type = "Task" [ 1415.687786] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.695109] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52736b9b-c362-43ad-ee84-521441eff874, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.092202] env[62383]: DEBUG oslo_concurrency.lockutils [req-56e17aff-99d5-47a4-ba5a-b351537d7d6f req-f4399d4b-ba57-4b30-b607-e601dc62f7c5 service nova] Releasing lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.197756] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52736b9b-c362-43ad-ee84-521441eff874, 'name': SearchDatastore_Task, 'duration_secs': 0.008166} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.198566] env[62383]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cecb409-1dee-4447-af10-59a5168cd999 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.203372] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1416.203372] env[62383]: value = "session[526c6062-9206-ac03-b2da-fd469a7c1551]52cb9008-cb63-dd3c-61cd-0d1c5cf25c90" [ 1416.203372] env[62383]: _type = "Task" [ 1416.203372] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.210369] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52cb9008-cb63-dd3c-61cd-0d1c5cf25c90, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.714131] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': session[526c6062-9206-ac03-b2da-fd469a7c1551]52cb9008-cb63-dd3c-61cd-0d1c5cf25c90, 'name': SearchDatastore_Task, 'duration_secs': 0.010542} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.714383] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Releasing lock "[datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.714673] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] d2465e54-944f-48a5-9174-4372353870ca/d2465e54-944f-48a5-9174-4372353870ca.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1416.714944] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b220c915-7f88-45ac-861c-d31e7b98ca87 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.722143] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1416.722143] env[62383]: value = "task-2452690" [ 1416.722143] env[62383]: _type = "Task" [ 1416.722143] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.729361] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452690, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.231338] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46771} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.231731] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/cac3b430-a1d5-4ad1-92ec-34c2261779a8/cac3b430-a1d5-4ad1-92ec-34c2261779a8.vmdk to [datastore2] d2465e54-944f-48a5-9174-4372353870ca/d2465e54-944f-48a5-9174-4372353870ca.vmdk {{(pid=62383) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1417.231813] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Extending root virtual disk to 1048576 {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1417.232047] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1dd25bf-aa60-4421-ae70-c9d93af83736 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.238419] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1417.238419] env[62383]: value = "task-2452691" [ 1417.238419] env[62383]: _type = "Task" [ 1417.238419] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.245391] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452691, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.747450] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452691, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060498} completed successfully. 
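Once the cached image is in place, the spawn path above copies it into the instance directory (CopyVirtualDisk_Task) and then grows the root disk to the flavor size (ExtendVirtualDisk_Task). "Extending root virtual disk to 1048576" is simply the flavor's root_gb=1 expressed in KiB, the unit the extend operation works in; a two-line check on the log's numbers:

    root_gb = 1                                # m1.nano root disk size from the flavor
    new_capacity_kb = root_gb * 1024 * 1024    # GiB -> KiB
    assert new_capacity_kb == 1048576          # the value logged by _extend_virtual_disk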
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.747910] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Extended root virtual disk {{(pid=62383) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1417.748715] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e360a11-f167-4a3b-b7af-644eed485c96 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.769965] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] d2465e54-944f-48a5-9174-4372353870ca/d2465e54-944f-48a5-9174-4372353870ca.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1417.770337] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab1fe115-4b98-4226-9568-16fcc367d1ae {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.789536] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1417.789536] env[62383]: value = "task-2452692" [ 1417.789536] env[62383]: _type = "Task" [ 1417.789536] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.797787] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452692, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.300329] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452692, 'name': ReconfigVM_Task, 'duration_secs': 0.269203} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.300691] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Reconfigured VM instance instance-0000007e to attach disk [datastore2] d2465e54-944f-48a5-9174-4372353870ca/d2465e54-944f-48a5-9174-4372353870ca.vmdk or device None with type sparse {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1418.301162] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd89b62d-e03a-44e2-84e8-61dfd4d6278c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.309277] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1418.309277] env[62383]: value = "task-2452693" [ 1418.309277] env[62383]: _type = "Task" [ 1418.309277] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.316649] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452693, 'name': Rename_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.819113] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452693, 'name': Rename_Task, 'duration_secs': 0.138424} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.819391] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Powering on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1418.819731] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-576580c4-f81e-40c9-92f7-7d3848debca1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.826701] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1418.826701] env[62383]: value = "task-2452694" [ 1418.826701] env[62383]: _type = "Task" [ 1418.826701] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.833820] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452694, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.336500] env[62383]: DEBUG oslo_vmware.api [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452694, 'name': PowerOnVM_Task, 'duration_secs': 0.423463} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.336997] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Powered on the VM {{(pid=62383) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1419.336997] env[62383]: INFO nova.compute.manager [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Took 6.79 seconds to spawn the instance on the hypervisor. [ 1419.337174] env[62383]: DEBUG nova.compute.manager [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Checking state {{(pid=62383) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1419.337932] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056f4f6b-c185-48dc-a3d2-3949e53c7138 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.855537] env[62383]: INFO nova.compute.manager [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Took 11.45 seconds to build instance. [ 1420.304147] env[62383]: DEBUG nova.compute.manager [req-80b88701-e66f-4b2f-bafe-c05348d2f163 req-32d4cf58-2cf6-4622-9d07-c639c561d799 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Received event network-changed-72d9470f-9ba0-4da1-8a01-2228345eaaa0 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1420.304416] env[62383]: DEBUG nova.compute.manager [req-80b88701-e66f-4b2f-bafe-c05348d2f163 req-32d4cf58-2cf6-4622-9d07-c639c561d799 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Refreshing instance network info cache due to event network-changed-72d9470f-9ba0-4da1-8a01-2228345eaaa0. 
{{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11653}} [ 1420.304682] env[62383]: DEBUG oslo_concurrency.lockutils [req-80b88701-e66f-4b2f-bafe-c05348d2f163 req-32d4cf58-2cf6-4622-9d07-c639c561d799 service nova] Acquiring lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.304871] env[62383]: DEBUG oslo_concurrency.lockutils [req-80b88701-e66f-4b2f-bafe-c05348d2f163 req-32d4cf58-2cf6-4622-9d07-c639c561d799 service nova] Acquired lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.305147] env[62383]: DEBUG nova.network.neutron [req-80b88701-e66f-4b2f-bafe-c05348d2f163 req-32d4cf58-2cf6-4622-9d07-c639c561d799 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Refreshing network info cache for port 72d9470f-9ba0-4da1-8a01-2228345eaaa0 {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1420.358188] env[62383]: DEBUG oslo_concurrency.lockutils [None req-fb77a68f-6dfe-425e-84a7-156a4663ce36 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.958s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.000642] env[62383]: DEBUG nova.network.neutron [req-80b88701-e66f-4b2f-bafe-c05348d2f163 req-32d4cf58-2cf6-4622-9d07-c639c561d799 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updated VIF entry in instance network info cache for port 72d9470f-9ba0-4da1-8a01-2228345eaaa0. 
{{(pid=62383) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1421.000992] env[62383]: DEBUG nova.network.neutron [req-80b88701-e66f-4b2f-bafe-c05348d2f163 req-32d4cf58-2cf6-4622-9d07-c639c561d799 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updating instance_info_cache with network_info: [{"id": "72d9470f-9ba0-4da1-8a01-2228345eaaa0", "address": "fa:16:3e:f1:19:a9", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d9470f-9b", "ovs_interfaceid": "72d9470f-9ba0-4da1-8a01-2228345eaaa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.504132] env[62383]: DEBUG oslo_concurrency.lockutils [req-80b88701-e66f-4b2f-bafe-c05348d2f163 req-32d4cf58-2cf6-4622-9d07-c639c561d799 service nova] Releasing lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.552876] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "d2465e54-944f-48a5-9174-4372353870ca" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.553195] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.056833] env[62383]: DEBUG nova.compute.utils [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1458.331876] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.332114] env[62383]: DEBUG 
oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.559298] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.836458] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.836642] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Starting heal instance info cache {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10445}} [ 1458.836724] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Rebuilding the list of instances to heal {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10449}} [ 1459.450183] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.450379] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquired lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.450555] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: d2465e54-944f-48a5-9174-4372353870ca] Forcefully refreshing network info cache for instance {{(pid=62383) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1459.450761] env[62383]: DEBUG nova.objects.instance [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lazy-loading 'info_cache' on Instance uuid d2465e54-944f-48a5-9174-4372353870ca {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.616439] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "d2465e54-944f-48a5-9174-4372353870ca" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.616815] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.616883] 
env[62383]: INFO nova.compute.manager [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Attaching volume 3fe04bbd-acf8-4ab4-974a-895848cb989d to /dev/sdb [ 1459.646858] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd13c4d7-4d34-4137-b465-0d847a0455be {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.653982] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68a4f43-6609-42aa-8713-e0e646070ce5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.666950] env[62383]: DEBUG nova.virt.block_device [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updating existing volume attachment record: 9c1e1974-e73e-4cad-8f63-e19d290bad3b {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1461.159449] env[62383]: DEBUG nova.network.neutron [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updating instance_info_cache with network_info: [{"id": "72d9470f-9ba0-4da1-8a01-2228345eaaa0", "address": "fa:16:3e:f1:19:a9", "network": {"id": "d6db21db-a228-4a15-87b4-c84d200e4b37", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2125079352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e0f48fbe0a7c49cf866e39daf3b5cf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "196c2dd2-7ffc-4f7d-9c93-e1ef0a6a3a9f", "external-id": "nsx-vlan-transportzone-584", "segmentation_id": 584, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72d9470f-9b", "ovs_interfaceid": "72d9470f-9ba0-4da1-8a01-2228345eaaa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.661882] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Releasing lock "refresh_cache-d2465e54-944f-48a5-9174-4372353870ca" {{(pid=62383) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.662123] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updated the network info_cache for instance {{(pid=62383) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10516}} [ 1461.662318] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task 
ComputeManager._poll_rebooting_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.662563] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.662717] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.662866] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.663016] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.663176] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.663578] env[62383]: DEBUG nova.compute.manager [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62383) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11064}} [ 1461.663578] env[62383]: DEBUG oslo_service.periodic_task [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62383) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1462.166962] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.167404] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.167404] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.167567] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62383) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1462.168503] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdab479-618d-40fd-b284-0d70f42a4a61 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.176351] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29fc127-0eb0-453e-ad69-9443385aa495 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.189888] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7e5391-b4b1-4759-afb9-9ef7e3ad65d3 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.195956] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da27b96e-47c3-4a54-b774-b92f9d75fdb2 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.862372] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181192MB free_disk=146GB free_vcpus=48 pci_devices=None {{(pid=62383) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1462.862641] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1462.862788] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.886333] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Instance d2465e54-944f-48a5-9174-4372353870ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62383) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1463.886620] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1463.886701] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62383) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1463.912431] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4217bf3-37ac-40ae-85bb-5a83f1853bbb {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.919540] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee11df5-d1ac-4703-a61f-cc060fe33668 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.947732] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a77753-dc0c-4866-ac72-d716ef1b4834 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.954168] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57a01d6-8a0e-4a0f-afa1-26a157685a68 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.966415] env[62383]: DEBUG nova.compute.provider_tree [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1464.209176] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Volume attach. 
Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1464.209415] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496641', 'volume_id': '3fe04bbd-acf8-4ab4-974a-895848cb989d', 'name': 'volume-3fe04bbd-acf8-4ab4-974a-895848cb989d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2465e54-944f-48a5-9174-4372353870ca', 'attached_at': '', 'detached_at': '', 'volume_id': '3fe04bbd-acf8-4ab4-974a-895848cb989d', 'serial': '3fe04bbd-acf8-4ab4-974a-895848cb989d'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1464.210286] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e415367c-b1c0-44b0-9517-d5f85f0390cf {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.226979] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f91052-bc37-4e67-8b2b-c4ba3add3861 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.250451] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] volume-3fe04bbd-acf8-4ab4-974a-895848cb989d/volume-3fe04bbd-acf8-4ab4-974a-895848cb989d.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1464.250672] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-739f597d-1c57-4bb4-bbc2-1a2132c50eba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.267476] env[62383]: DEBUG oslo_vmware.api [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1464.267476] env[62383]: value = "task-2452697" [ 1464.267476] env[62383]: _type = "Task" [ 1464.267476] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.274744] env[62383]: DEBUG oslo_vmware.api [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452697, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.469825] env[62383]: DEBUG nova.scheduler.client.report [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1464.776900] env[62383]: DEBUG oslo_vmware.api [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452697, 'name': ReconfigVM_Task, 'duration_secs': 0.325359} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.777179] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Reconfigured VM instance instance-0000007e to attach disk [datastore2] volume-3fe04bbd-acf8-4ab4-974a-895848cb989d/volume-3fe04bbd-acf8-4ab4-974a-895848cb989d.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1464.781807] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85014b2f-3f96-4fe5-932e-ffe987094136 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.796264] env[62383]: DEBUG oslo_vmware.api [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1464.796264] env[62383]: value = "task-2452698" [ 1464.796264] env[62383]: _type = "Task" [ 1464.796264] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.803494] env[62383]: DEBUG oslo_vmware.api [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452698, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.974698] env[62383]: DEBUG nova.compute.resource_tracker [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62383) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1464.975114] env[62383]: DEBUG oslo_concurrency.lockutils [None req-753e80f5-4dc1-49ef-9813-5a34c944d1e2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.112s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.305830] env[62383]: DEBUG oslo_vmware.api [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452698, 'name': ReconfigVM_Task, 'duration_secs': 0.152984} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.306154] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496641', 'volume_id': '3fe04bbd-acf8-4ab4-974a-895848cb989d', 'name': 'volume-3fe04bbd-acf8-4ab4-974a-895848cb989d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2465e54-944f-48a5-9174-4372353870ca', 'attached_at': '', 'detached_at': '', 'volume_id': '3fe04bbd-acf8-4ab4-974a-895848cb989d', 'serial': '3fe04bbd-acf8-4ab4-974a-895848cb989d'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1466.339436] env[62383]: DEBUG nova.objects.instance [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'flavor' on Instance uuid d2465e54-944f-48a5-9174-4372353870ca {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1466.844899] env[62383]: DEBUG oslo_concurrency.lockutils [None req-56a86774-8c35-4401-a6b6-9feec1306926 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.228s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.694888] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "d2465e54-944f-48a5-9174-4372353870ca" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.695211] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" acquired by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.198634] env[62383]: DEBUG nova.compute.utils [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Using /dev/sd instead of None {{(pid=62383) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1468.701852] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.758067] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "d2465e54-944f-48a5-9174-4372353870ca" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.758490] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.758617] env[62383]: INFO nova.compute.manager [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Attaching volume 91286e3a-0726-4efc-823e-7cf96fd8a573 to /dev/sdc [ 1469.790256] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad684a9c-33c6-4161-9dd9-7918a97e06bc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.797752] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8e31fa-e718-4e1c-8c87-4950a8819b32 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.811808] env[62383]: DEBUG nova.virt.block_device [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updating existing volume attachment record: ab427ade-0e8f-499d-99cf-576c3e584ce2 {{(pid=62383) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1474.365263] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Volume attach. 
Driver type: vmdk {{(pid=62383) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1474.365514] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496642', 'volume_id': '91286e3a-0726-4efc-823e-7cf96fd8a573', 'name': 'volume-91286e3a-0726-4efc-823e-7cf96fd8a573', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2465e54-944f-48a5-9174-4372353870ca', 'attached_at': '', 'detached_at': '', 'volume_id': '91286e3a-0726-4efc-823e-7cf96fd8a573', 'serial': '91286e3a-0726-4efc-823e-7cf96fd8a573'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1474.366456] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c226bd-7f38-4bd7-af01-7d9e108085ce {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.382479] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003d08c0-f21a-430c-a3a2-c3c6f982ca56 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.408208] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] volume-91286e3a-0726-4efc-823e-7cf96fd8a573/volume-91286e3a-0726-4efc-823e-7cf96fd8a573.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1474.408437] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d93a2656-4cd4-4854-a4db-316aaf4cf76c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.426924] env[62383]: DEBUG oslo_vmware.api [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1474.426924] env[62383]: value = "task-2452701" [ 1474.426924] env[62383]: _type = "Task" [ 1474.426924] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.434250] env[62383]: DEBUG oslo_vmware.api [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452701, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.936894] env[62383]: DEBUG oslo_vmware.api [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452701, 'name': ReconfigVM_Task, 'duration_secs': 0.365314} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.937212] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Reconfigured VM instance instance-0000007e to attach disk [datastore2] volume-91286e3a-0726-4efc-823e-7cf96fd8a573/volume-91286e3a-0726-4efc-823e-7cf96fd8a573.vmdk or device None with type thin {{(pid=62383) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1474.941731] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3940e9f1-a97f-4bcb-8fef-21807aba8610 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.956204] env[62383]: DEBUG oslo_vmware.api [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1474.956204] env[62383]: value = "task-2452702" [ 1474.956204] env[62383]: _type = "Task" [ 1474.956204] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.963268] env[62383]: DEBUG oslo_vmware.api [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452702, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.465945] env[62383]: DEBUG oslo_vmware.api [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452702, 'name': ReconfigVM_Task, 'duration_secs': 0.141701} completed successfully. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.466281] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496642', 'volume_id': '91286e3a-0726-4efc-823e-7cf96fd8a573', 'name': 'volume-91286e3a-0726-4efc-823e-7cf96fd8a573', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2465e54-944f-48a5-9174-4372353870ca', 'attached_at': '', 'detached_at': '', 'volume_id': '91286e3a-0726-4efc-823e-7cf96fd8a573', 'serial': '91286e3a-0726-4efc-823e-7cf96fd8a573'} {{(pid=62383) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1476.499822] env[62383]: DEBUG nova.objects.instance [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'flavor' on Instance uuid d2465e54-944f-48a5-9174-4372353870ca {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1477.004761] env[62383]: DEBUG oslo_concurrency.lockutils [None req-5845e8ad-7661-4c2f-a183-c066993bc9db tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.246s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.283122] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "d2465e54-944f-48a5-9174-4372353870ca" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.283301] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.786505] env[62383]: INFO nova.compute.manager [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Detaching volume 3fe04bbd-acf8-4ab4-974a-895848cb989d [ 1477.816515] env[62383]: INFO nova.virt.block_device [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Attempting to driver detach volume 3fe04bbd-acf8-4ab4-974a-895848cb989d from mountpoint /dev/sdb [ 1477.816836] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: 
d2465e54-944f-48a5-9174-4372353870ca] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1477.817490] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496641', 'volume_id': '3fe04bbd-acf8-4ab4-974a-895848cb989d', 'name': 'volume-3fe04bbd-acf8-4ab4-974a-895848cb989d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2465e54-944f-48a5-9174-4372353870ca', 'attached_at': '', 'detached_at': '', 'volume_id': '3fe04bbd-acf8-4ab4-974a-895848cb989d', 'serial': '3fe04bbd-acf8-4ab4-974a-895848cb989d'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1477.817920] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54b5829-5686-454b-a685-60279baf9f00 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.842140] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e217160-7e76-40cb-9ff0-b769578097ba {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.848533] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b727a858-fee5-4d62-b038-9e6c1a8c0bfc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.871576] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7229087f-79b7-47cb-a16b-659e57db889c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.885580] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] The volume has not been displaced from its original location: [datastore2] volume-3fe04bbd-acf8-4ab4-974a-895848cb989d/volume-3fe04bbd-acf8-4ab4-974a-895848cb989d.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1477.890773] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Reconfiguring VM instance instance-0000007e to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1477.891020] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c907970-b95b-4983-b599-33b78af63f6d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.907885] env[62383]: DEBUG oslo_vmware.api [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1477.907885] env[62383]: value = "task-2452703" [ 1477.907885] env[62383]: _type = "Task" [ 1477.907885] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.915338] env[62383]: DEBUG oslo_vmware.api [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452703, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.417467] env[62383]: DEBUG oslo_vmware.api [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452703, 'name': ReconfigVM_Task, 'duration_secs': 0.21087} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.417750] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Reconfigured VM instance instance-0000007e to detach disk 2001 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1478.422254] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f3986f4-03bb-4c5e-bfbe-c7392fb1d714 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.436809] env[62383]: DEBUG oslo_vmware.api [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1478.436809] env[62383]: value = "task-2452704" [ 1478.436809] env[62383]: _type = "Task" [ 1478.436809] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.444013] env[62383]: DEBUG oslo_vmware.api [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452704, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.946028] env[62383]: DEBUG oslo_vmware.api [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452704, 'name': ReconfigVM_Task, 'duration_secs': 0.141773} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.946372] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496641', 'volume_id': '3fe04bbd-acf8-4ab4-974a-895848cb989d', 'name': 'volume-3fe04bbd-acf8-4ab4-974a-895848cb989d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2465e54-944f-48a5-9174-4372353870ca', 'attached_at': '', 'detached_at': '', 'volume_id': '3fe04bbd-acf8-4ab4-974a-895848cb989d', 'serial': '3fe04bbd-acf8-4ab4-974a-895848cb989d'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1479.486203] env[62383]: DEBUG nova.objects.instance [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'flavor' on Instance uuid d2465e54-944f-48a5-9174-4372353870ca {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1480.493677] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f0edb465-8ed9-407f-a601-f49573c2b4a1 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.210s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.515222] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "d2465e54-944f-48a5-9174-4372353870ca" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.515475] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.018931] env[62383]: INFO nova.compute.manager [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Detaching volume 91286e3a-0726-4efc-823e-7cf96fd8a573 [ 1481.048187] env[62383]: INFO nova.virt.block_device [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 
tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Attempting to driver detach volume 91286e3a-0726-4efc-823e-7cf96fd8a573 from mountpoint /dev/sdc [ 1481.048428] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Volume detach. Driver type: vmdk {{(pid=62383) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1481.048616] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496642', 'volume_id': '91286e3a-0726-4efc-823e-7cf96fd8a573', 'name': 'volume-91286e3a-0726-4efc-823e-7cf96fd8a573', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2465e54-944f-48a5-9174-4372353870ca', 'attached_at': '', 'detached_at': '', 'volume_id': '91286e3a-0726-4efc-823e-7cf96fd8a573', 'serial': '91286e3a-0726-4efc-823e-7cf96fd8a573'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1481.049703] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b828714e-7229-4d1d-a466-75b653399371 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.071253] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e63c23f-f087-4f7d-b9e7-13c1e26d121d {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.077706] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154342a8-498d-4796-b199-4c7c9c82a34a {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.097056] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba3c1d4-3e53-48e1-8187-26367ceaffee {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.110971] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] The volume has not been displaced from its original location: [datastore2] volume-91286e3a-0726-4efc-823e-7cf96fd8a573/volume-91286e3a-0726-4efc-823e-7cf96fd8a573.vmdk. No consolidation needed. 
{{(pid=62383) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1481.115970] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Reconfiguring VM instance instance-0000007e to detach disk 2002 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1481.116226] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f92c032-05fb-464d-a16a-4d8cfd00b303 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.133455] env[62383]: DEBUG oslo_vmware.api [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1481.133455] env[62383]: value = "task-2452705" [ 1481.133455] env[62383]: _type = "Task" [ 1481.133455] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.141790] env[62383]: DEBUG oslo_vmware.api [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452705, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.643191] env[62383]: DEBUG oslo_vmware.api [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452705, 'name': ReconfigVM_Task, 'duration_secs': 0.19969} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.643582] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Reconfigured VM instance instance-0000007e to detach disk 2002 {{(pid=62383) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1481.648020] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5595710-3b49-4dda-9733-59f408145586 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.662335] env[62383]: DEBUG oslo_vmware.api [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1481.662335] env[62383]: value = "task-2452706" [ 1481.662335] env[62383]: _type = "Task" [ 1481.662335] env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.669817] env[62383]: DEBUG oslo_vmware.api [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452706, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.172503] env[62383]: DEBUG oslo_vmware.api [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452706, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.672831] env[62383]: DEBUG oslo_vmware.api [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452706, 'name': ReconfigVM_Task, 'duration_secs': 0.742997} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.673139] env[62383]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-496642', 'volume_id': '91286e3a-0726-4efc-823e-7cf96fd8a573', 'name': 'volume-91286e3a-0726-4efc-823e-7cf96fd8a573', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd2465e54-944f-48a5-9174-4372353870ca', 'attached_at': '', 'detached_at': '', 'volume_id': '91286e3a-0726-4efc-823e-7cf96fd8a573', 'serial': '91286e3a-0726-4efc-823e-7cf96fd8a573'} {{(pid=62383) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1483.211836] env[62383]: DEBUG nova.objects.instance [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'flavor' on Instance uuid d2465e54-944f-48a5-9174-4372353870ca {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1484.220070] env[62383]: DEBUG oslo_concurrency.lockutils [None req-0e427891-2ffa-4f8a-aac0-4e5299cb3149 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.704s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.885242] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "d2465e54-944f-48a5-9174-4372353870ca" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.885530] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.885740] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760
tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "d2465e54-944f-48a5-9174-4372353870ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.885951] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.886153] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.888468] env[62383]: INFO nova.compute.manager [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Terminating instance [ 1485.392259] env[62383]: DEBUG nova.compute.manager [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Start destroying the instance on the hypervisor. {{(pid=62383) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1485.392660] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Destroying instance {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1485.393421] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2765e36f-646b-4823-b5d1-7047fd359878 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.401274] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Powering off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1485.401499] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90ce7ca7-5945-49ea-a176-68c85273960e {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.407493] env[62383]: DEBUG oslo_vmware.api [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1485.407493] env[62383]: value = "task-2452707" [ 1485.407493] env[62383]: _type = "Task" [ 1485.407493] env[62383]: } to complete.
{{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.415063] env[62383]: DEBUG oslo_vmware.api [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452707, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.917887] env[62383]: DEBUG oslo_vmware.api [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452707, 'name': PowerOffVM_Task, 'duration_secs': 0.174147} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.918217] env[62383]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Powered off the VM {{(pid=62383) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1485.918406] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Unregistering the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1485.918652] env[62383]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba01a264-872e-494a-95d3-746dd276bea4 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.989162] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Unregistered the VM {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1485.989400] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Deleting contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1485.989557] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Deleting the datastore file [datastore2] d2465e54-944f-48a5-9174-4372353870ca {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1485.989821] env[62383]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fecb4820-c765-4f9f-a697-3c57337d19d1 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.997136] env[62383]: DEBUG oslo_vmware.api [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for the task: (returnval){ [ 1485.997136] env[62383]: value = "task-2452709" [ 1485.997136] env[62383]: _type = "Task" [ 1485.997136] 
env[62383]: } to complete. {{(pid=62383) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.003938] env[62383]: DEBUG oslo_vmware.api [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452709, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.507202] env[62383]: DEBUG oslo_vmware.api [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Task: {'id': task-2452709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140648} completed successfully. {{(pid=62383) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.507634] env[62383]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Deleted the datastore file {{(pid=62383) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1486.507634] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Deleted contents of the VM from datastore datastore2 {{(pid=62383) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1486.507887] env[62383]: DEBUG nova.virt.vmwareapi.vmops [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Instance destroyed {{(pid=62383) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1486.508102] env[62383]: INFO nova.compute.manager [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] [instance: d2465e54-944f-48a5-9174-4372353870ca] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1486.508364] env[62383]: DEBUG oslo.service.loopingcall [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62383) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1486.508554] env[62383]: DEBUG nova.compute.manager [-] [instance: d2465e54-944f-48a5-9174-4372353870ca] Deallocating network for instance {{(pid=62383) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1486.508648] env[62383]: DEBUG nova.network.neutron [-] [instance: d2465e54-944f-48a5-9174-4372353870ca] deallocate_for_instance() {{(pid=62383) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1486.995033] env[62383]: DEBUG nova.compute.manager [req-51ab7ad5-6ba4-442c-82c3-2bcc89713cee req-9b5e2a6e-6106-4fff-8fb6-a7af027c5a31 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Received event network-vif-deleted-72d9470f-9ba0-4da1-8a01-2228345eaaa0 {{(pid=62383) external_instance_event /opt/stack/nova/nova/compute/manager.py:11648}} [ 1486.995234] env[62383]: INFO nova.compute.manager [req-51ab7ad5-6ba4-442c-82c3-2bcc89713cee req-9b5e2a6e-6106-4fff-8fb6-a7af027c5a31 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Neutron deleted interface 72d9470f-9ba0-4da1-8a01-2228345eaaa0; detaching it from the instance and deleting it from the info cache [ 1486.995431] env[62383]: DEBUG nova.network.neutron [req-51ab7ad5-6ba4-442c-82c3-2bcc89713cee req-9b5e2a6e-6106-4fff-8fb6-a7af027c5a31 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.414299] env[62383]: DEBUG nova.network.neutron [-] [instance: d2465e54-944f-48a5-9174-4372353870ca] Updating instance_info_cache with network_info: [] {{(pid=62383) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.497720] env[62383]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ebe92550-76b6-49b3-9aa5-9a05ae8b47cc {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.510050] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe21108f-ac82-41c2-9a23-531c92cccae5 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.534029] env[62383]: DEBUG nova.compute.manager [req-51ab7ad5-6ba4-442c-82c3-2bcc89713cee req-9b5e2a6e-6106-4fff-8fb6-a7af027c5a31 service nova] [instance: d2465e54-944f-48a5-9174-4372353870ca] Detach interface failed, port_id=72d9470f-9ba0-4da1-8a01-2228345eaaa0, reason: Instance d2465e54-944f-48a5-9174-4372353870ca could not be found. {{(pid=62383) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11482}} [ 1487.916691] env[62383]: INFO nova.compute.manager [-] [instance: d2465e54-944f-48a5-9174-4372353870ca] Took 1.41 seconds to deallocate network for instance. 
[ 1488.424107] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.424107] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.424107] env[62383]: DEBUG nova.objects.instance [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lazy-loading 'resources' on Instance uuid d2465e54-944f-48a5-9174-4372353870ca {{(pid=62383) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1488.957503] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0321e2e1-ab81-4613-9f2a-8307d4902cab {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.964821] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d64b86-e6e8-43d3-b44a-9f65a2b99a8c {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.993446] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea022530-b376-4d5d-ad07-3dfab0becea6 {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.999830] env[62383]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ce590b-e833-4ffd-afa1-756d6f170ffe {{(pid=62383) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.012140] env[62383]: DEBUG nova.compute.provider_tree [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Inventory has not changed in ProviderTree for provider: 60615f54-0557-436e-a486-87505bffb4c7 {{(pid=62383) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1489.515058] env[62383]: DEBUG nova.scheduler.client.report [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Inventory has not changed for provider 60615f54-0557-436e-a486-87505bffb4c7 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 146, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62383) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1490.019988] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 
tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.596s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.041100] env[62383]: INFO nova.scheduler.client.report [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Deleted allocations for instance d2465e54-944f-48a5-9174-4372353870ca [ 1490.548809] env[62383]: DEBUG oslo_concurrency.lockutils [None req-f9ae6c75-6bfe-48f4-a17e-5a9ab8e9f760 tempest-AttachVolumeTestJSON-1109327635 tempest-AttachVolumeTestJSON-1109327635-project-member] Lock "d2465e54-944f-48a5-9174-4372353870ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.663s {{(pid=62383) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}